signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ProcBol { /** * Skip line break characters . ' \ r \ n ' for example * @ param source text source * @ param offset current index * @ return new index */ private int skipLineBreak ( Source source , final int offset ) { } }
char p ; char c ; int index = offset ; p = source . charAt ( index ) ; index ++ ; if ( index < source . length ( ) ) { c = source . charAt ( index ) ; if ( ( c == '\n' || c == '\r' ) && c != p ) { index ++ ; } } return index ;
public class FSNamesystem { /** * Start decommissioning the specified datanode . */ void startDecommission ( DatanodeDescriptor node ) throws IOException { } }
if ( ! node . isDecommissionInProgress ( ) && ! node . isDecommissioned ( ) ) { LOG . info ( "Start Decommissioning node " + node . getName ( ) + " with " + node . numBlocks ( ) + " blocks." ) ; synchronized ( heartbeats ) { updateStats ( node , false ) ; node . startDecommission ( ) ; updateStats ( node , true ) ; } if ( ( ( Monitor ) dnthread . getRunnable ( ) ) . startDecommision ( node ) ) { node . setStartTime ( now ( ) ) ; } } else if ( node . isDecommissionInProgress ( ) ) { if ( ( ( Monitor ) dnthread . getRunnable ( ) ) . startDecommision ( node ) ) { node . setStartTime ( now ( ) ) ; } }
public class JsonLdProcessor {
    /**
     * Frames the given input using the frame according to the steps in the
     * <a href="http://json-ld.org/spec/latest/json-ld-framing/#framing-algorithm">
     * Framing Algorithm</a>.
     *
     * @param input The input JSON-LD object.
     * @param frame The frame to use when re-arranging the data of input;
     *        either in the form of a JSON object or as IRI.
     * @param opts The {@link JsonLdOptions} that are to be sent to the
     *        framing algorithm.
     * @return The framed JSON-LD document.
     * @throws JsonLdError If there is an error while framing.
     */
    public static Map<String, Object> frame(Object input, Object frame, JsonLdOptions opts)
            throws JsonLdError {
        // Clone so framing cannot mutate the caller's frame object.
        if (frame instanceof Map) {
            frame = JsonLdUtils.clone(frame);
        }
        // TODO string / IO input
        // 2. Set expanded input to the result of using the expand method using
        // input and options.
        final Object expandedInput = expand(input, opts);
        // 3. Set expanded frame to the result of using the expand method using
        // frame and options with expandContext set to null and the
        // frameExpansion option set to true. The previous expandContext is
        // saved and restored so the options object is not left modified.
        final Object savedExpandedContext = opts.getExpandContext();
        opts.setExpandContext(null);
        opts.setFrameExpansion(true);
        final List<Object> expandedFrame = expand(frame, opts);
        opts.setExpandContext(savedExpandedContext);
        // 4. Set context to the value of @context from frame, if it exists, or
        // to a new empty context, otherwise.
        final JsonLdApi api = new JsonLdApi(expandedInput, opts);
        final Context activeCtx = api.context
                .parse(((Map<String, Object>) frame).get(JsonLdConsts.CONTEXT));
        final List<Object> framed = api.frame(expandedInput, expandedFrame);
        if (opts.getPruneBlankNodeIdentifiers()) {
            JsonLdUtils.pruneBlankNodes(framed);
        }
        Object compacted = api.compact(activeCtx, null, framed, opts.getCompactArrays());
        final Map<String, Object> rval = activeCtx.serialize();
        // Wrap a single compacted object under @graph unless omitGraph is set.
        final boolean addGraph = ((!(compacted instanceof List)) && !opts.getOmitGraph());
        // NOTE(review): addGraph already implies !(compacted instanceof List),
        // so the second operand here is redundant — kept as-is for safety.
        if (addGraph && !(compacted instanceof List)) {
            final List<Object> tmp = new ArrayList<Object>();
            tmp.add(compacted);
            compacted = tmp;
        }
        if (addGraph || (compacted instanceof List)) {
            final String alias = activeCtx.compactIri(JsonLdConsts.GRAPH);
            rval.put(alias, compacted);
        } else if (!addGraph && (compacted instanceof Map)) {
            // Single object without graph wrapping: merge into the serialized
            // context map directly.
            rval.putAll((Map) compacted);
        }
        // Strip @preserve markers introduced during framing.
        JsonLdUtils.removePreserve(activeCtx, rval, opts);
        return rval;
    }
}
public class UrlMap { /** * < pre > * URL prefix . Uses regular expression syntax , which means regexp * special characters must be escaped , but should not contain groupings . * All URLs that begin with this prefix are handled by this handler , using the * portion of the URL after the prefix as part of the file path . * < / pre > * < code > string url _ regex = 1 ; < / code > */ public com . google . protobuf . ByteString getUrlRegexBytes ( ) { } }
java . lang . Object ref = urlRegex_ ; if ( ref instanceof java . lang . String ) { com . google . protobuf . ByteString b = com . google . protobuf . ByteString . copyFromUtf8 ( ( java . lang . String ) ref ) ; urlRegex_ = b ; return b ; } else { return ( com . google . protobuf . ByteString ) ref ; }
public class MethodInfo { /** * メソッド名を設定する * @ param methodName メソッド名 * @ throws IllegalArgumentException methodName is empty . */ @ XmlAttribute ( name = "name" , required = true ) public void setMethodName ( final String methodName ) { } }
ArgUtils . notEmpty ( methodName , methodName ) ; this . methodName = methodName ;
public class JsonStreamer { /** * Parse the current byte buffer to find the next potential message . * The first potential serialized message data in the buffer is removed and * returned . Any delimiters at the start of the buffer will be cleared . * @ return A potential serialized JSON message or null if none found . */ private String readToDelimiter ( ) { } }
String line = null ; while ( line == null || line . isEmpty ( ) ) { int delimiterIndex = mBuffer . indexOf ( DELIMITER ) ; if ( delimiterIndex != - 1 ) { line = mBuffer . substring ( 0 , delimiterIndex ) ; mBuffer . delete ( 0 , delimiterIndex + 1 ) ; } else { line = null ; break ; } } return line ;
public class SignatureGenerator { /** * ClassSignature : : = * OptFormalTypeParameters SuperclassSignature { SuperinterfaceSignature } . */ private void genClassSignature ( TypeElement type , StringBuilder sb ) { } }
genOptFormalTypeParameters ( type . getTypeParameters ( ) , sb ) ; // JDT returns null for an interface ' s superclass , but signatures expect Object . if ( type . getKind ( ) . isInterface ( ) ) { sb . append ( JAVA_OBJECT_SIGNATURE ) ; } else { genTypeSignature ( type . getSuperclass ( ) , sb ) ; } for ( TypeMirror intrface : type . getInterfaces ( ) ) { genTypeSignature ( intrface , sb ) ; }
public class ExcelWriter {
    /**
     * Flushes the Excel workbook to the given output stream.
     * (Javadoc translated from Chinese.)
     *
     * @param out the output stream
     * @param isCloseOut whether to close the output stream afterwards
     * @return this
     * @throws IORuntimeException wrapping any underlying IOException
     * @since 4.4.1
     */
    public ExcelWriter flush(OutputStream out, boolean isCloseOut) throws IORuntimeException {
        Assert.isFalse(this.isClosed, "ExcelWriter has been closed!");
        try {
            this.workbook.write(out);
            out.flush();
        } catch (IOException e) {
            // Re-thrown unchecked, per this library's convention.
            throw new IORuntimeException(e);
        } finally {
            // Close in finally so the stream is released even when write fails.
            if (isCloseOut) {
                IoUtil.close(out);
            }
        }
        return this;
    }
}
public class ModelsImpl {
    /**
     * Update an entity role for a given entity.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param entityId The entity ID.
     * @param roleId The entity role ID.
     * @param updateCustomPrebuiltEntityRoleOptionalParameter the object representing the optional parameters to be set before calling this API
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<OperationStatus> updateCustomPrebuiltEntityRoleAsync(UUID appId, String versionId, UUID entityId, UUID roleId, UpdateCustomPrebuiltEntityRoleOptionalParameter updateCustomPrebuiltEntityRoleOptionalParameter, final ServiceCallback<OperationStatus> serviceCallback) {
        // Thin adapter: wraps the service-response observable in a
        // ServiceFuture and wires in the caller's callback.
        return ServiceFuture.fromResponse(updateCustomPrebuiltEntityRoleWithServiceResponseAsync(appId, versionId, entityId, roleId, updateCustomPrebuiltEntityRoleOptionalParameter), serviceCallback);
    }
}
public class Http2ClientStreamTransportState { /** * Inspect the raw metadata and figure out what charset is being used . */ private static Charset extractCharset ( Metadata headers ) { } }
String contentType = headers . get ( GrpcUtil . CONTENT_TYPE_KEY ) ; if ( contentType != null ) { String [ ] split = contentType . split ( "charset=" , 2 ) ; try { return Charset . forName ( split [ split . length - 1 ] . trim ( ) ) ; } catch ( Exception t ) { // Ignore and assume UTF - 8 } } return Charsets . UTF_8 ;
public class SocketBackend {
    /**
     * Sends data from frontend to backend (import of data): copies the entire
     * input stream to the object stream as raw block data and flushes it.
     *
     * @param oos destination stream; flushed but not closed here
     * @param inputStream source; read to EOF but not closed here
     * @throws IOException if reading or writing fails
     */
    public static void sendStream(ObjectOutputStream oos, InputStream inputStream)
            throws IOException {
        // Copy in chunks instead of one call per byte — same output bytes,
        // far fewer method calls for large imports.
        final byte[] buffer = new byte[8192];
        int read;
        while ((read = inputStream.read(buffer)) >= 0) {
            oos.write(buffer, 0, read);
        }
        oos.flush();
    }
}
public class AbstractConsistentHash { /** * Adds all elements from < code > src < / code > list that do not already exist in < code > dest < / code > list to the latter . * @ param dest List where elements are added * @ param src List of elements to add - this is never modified */ protected static void mergeLists ( List < Address > dest , List < Address > src ) { } }
for ( Address node : src ) { if ( ! dest . contains ( node ) ) { dest . add ( node ) ; } }
public class RecipientListStringAnalyser { /** * Mind : Include ' : ' suffix , e . g . for " bcc : < email address > " it is " bcc " + " : " , i . e . 3 + 1 = 4 */ private int lengthOfTypePrefix ( int type ) { } }
int length ; switch ( type ) { case EmailRecipientUtils . BCC : length = 4 ; break ; case EmailRecipientUtils . CC : length = 3 ; break ; case EmailRecipientUtils . TO : length = 0 ; break ; default : throw new IllegalArgumentException ( "Unknown type prefix: " + type ) ; } return length ;
public class ExecuteAsyncRunnable { /** * Returns true if lock succeeded , or no lock was needed . * Returns false if locking was unsuccessfull . */ protected boolean lockJobIfNeeded ( ) { } }
try { if ( job . isExclusive ( ) ) { processEngineConfiguration . getCommandExecutor ( ) . execute ( new LockExclusiveJobCmd ( job ) ) ; } } catch ( Throwable lockException ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Could not lock exclusive job. Unlocking job so it can be acquired again. Catched exception: " + lockException . getMessage ( ) ) ; } // Release the job again so it can be acquired later or by another node unacquireJob ( ) ; return false ; } return true ;
public class BaseDrawable { /** * { @ inheritDoc } */ @ Override public void draw ( @ NonNull Canvas canvas ) { } }
Rect bounds = getBounds ( ) ; if ( bounds . width ( ) == 0 || bounds . height ( ) == 0 ) { return ; } int saveCount = canvas . save ( ) ; canvas . translate ( bounds . left , bounds . top ) ; onDraw ( canvas , bounds . width ( ) , bounds . height ( ) ) ; canvas . restoreToCount ( saveCount ) ;
public class Execute { /** * set the value arguments Command - line arguments passed to the application . * @ param args value to set */ public void setArguments ( Object args ) { } }
if ( args instanceof lucee . runtime . type . Collection ) { StringBuilder sb = new StringBuilder ( ) ; lucee . runtime . type . Collection coll = ( lucee . runtime . type . Collection ) args ; // lucee . runtime . type . Collection . Key [ ] keys = coll . keys ( ) ; Iterator < Object > it = coll . valueIterator ( ) ; while ( it . hasNext ( ) ) { sb . append ( ' ' ) ; sb . append ( it . next ( ) ) ; } arguments = sb . toString ( ) ; } else if ( args instanceof String ) { arguments = " " + args . toString ( ) ; } else this . arguments = "" ;
public class FileMonitorSource {
    /**
     * Gets the file lines since the actual until the last.
     *
     * @param actual actual line identification: a 0-based {@code Integer}
     *        count of lines already consumed, or null to start from the top
     * @return the list of lines read after that point
     * @throws Exception if the file cannot be opened/read, or is shorter than
     *         the requested start position ("File too small")
     */
    @Override
    public List<MonitorLine> getLinesFrom(Object actual) throws Exception {
        // TODO Enhance line retrieval to get last lines directly
        final List<MonitorLine> result = new ArrayList<MonitorLine>();
        // int counters instead of boxed Integer — no behavior change, less boxing.
        final int startLine = (actual == null) ? 0 : (Integer) actual;
        int currentLineNo = 0;
        // try-with-resources replaces the manual close() with its swallowed
        // IOException; the reader is always released.
        try (BufferedReader in = new BufferedReader(new FileReader(getFilename()))) {
            // Skip everything up to and including startLine (original semantics).
            while (currentLineNo < startLine + 1) {
                if (in.readLine() == null) {
                    throw new IOException("File too small");
                }
                currentLineNo++;
            }
            // Collect all remaining lines, numbered from startLine + 1.
            for (String line = in.readLine(); line != null; line = in.readLine()) {
                result.add(new MonitorLine(currentLineNo, line));
                currentLineNo++;
            }
        }
        return result;
    }
}
public class PolicyStatesInner {
    /**
     * Summarizes policy states for the resource group level policy assignment.
     *
     * @param subscriptionId Microsoft Azure subscription ID.
     * @param resourceGroupName Resource group name.
     * @param policyAssignmentName Policy assignment name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws QueryFailureException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the SummarizeResultsInner object if successful.
     */
    public SummarizeResultsInner summarizeForResourceGroupLevelPolicyAssignment(String subscriptionId, String resourceGroupName, String policyAssignmentName) {
        // Synchronous facade: blocks on the async service-response variant and
        // unwraps the body.
        return summarizeForResourceGroupLevelPolicyAssignmentWithServiceResponseAsync(subscriptionId, resourceGroupName, policyAssignmentName).toBlocking().single().body();
    }
}
public class CommerceTierPriceEntryPersistenceImpl {
    /**
     * Returns the commerce tier price entry with the primary key or returns
     * <code>null</code> if it could not be found.
     *
     * @param primaryKey the primary key of the commerce tier price entry
     * @return the commerce tier price entry, or <code>null</code> if a commerce tier price entry with the primary key could not be found
     */
    @Override
    public CommerceTierPriceEntry fetchByPrimaryKey(Serializable primaryKey) {
        // Fast path: consult the entity cache first.
        Serializable serializable = entityCache.getResult(CommerceTierPriceEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceTierPriceEntryImpl.class, primaryKey);
        // nullModel is a sentinel marking a previously confirmed miss, so the
        // database is not queried again for a key known to be absent.
        if (serializable == nullModel) {
            return null;
        }
        CommerceTierPriceEntry commerceTierPriceEntry = (CommerceTierPriceEntry) serializable;
        if (commerceTierPriceEntry == null) {
            // Cache miss: load from the database and populate the cache either
            // with the entity or with the nullModel miss marker.
            Session session = null;
            try {
                session = openSession();
                commerceTierPriceEntry = (CommerceTierPriceEntry) session.get(CommerceTierPriceEntryImpl.class, primaryKey);
                if (commerceTierPriceEntry != null) {
                    cacheResult(commerceTierPriceEntry);
                } else {
                    entityCache.putResult(CommerceTierPriceEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceTierPriceEntryImpl.class, primaryKey, nullModel);
                }
            } catch (Exception e) {
                // On failure, drop any cache entry for this key so a stale
                // value cannot linger, then rethrow via the standard wrapper.
                entityCache.removeResult(CommerceTierPriceEntryModelImpl.ENTITY_CACHE_ENABLED, CommerceTierPriceEntryImpl.class, primaryKey);
                throw processException(e);
            } finally {
                closeSession(session);
            }
        }
        return commerceTierPriceEntry;
    }
}
public class AWSElasticBeanstalkClient { /** * Returns the descriptions of existing applications . * @ param describeApplicationsRequest * Request to describe one or more applications . * @ return Result of the DescribeApplications operation returned by the service . * @ sample AWSElasticBeanstalk . DescribeApplications * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticbeanstalk - 2010-12-01 / DescribeApplications " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeApplicationsResult describeApplications ( DescribeApplicationsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeApplications ( request ) ;
public class MutatorImpl { /** * Deletes the columns defined in the HSuperColumn . If there are no HColumns attached , * we delete the whole thing . */ public < SN , N , V > Mutator < K > addSubDelete ( K key , String cf , HSuperColumn < SN , N , V > sc ) { } }
return addSubDelete ( key , cf , sc , keyspace . createClock ( ) ) ;
public class WorkbinsApi { /** * Get All Valid Workbins . * @ param cancelConsultData Request parameters . ( optional ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > getWorkbinsWithHttpInfo ( CancelConsultData1 cancelConsultData ) throws ApiException { } }
com . squareup . okhttp . Call call = getWorkbinsValidateBeforeCall ( cancelConsultData , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class MapStorage { /** * Caller must hold upgrade or write lock . */ private boolean doTryDeleteNoLock ( S storable ) { } }
return mMap . remove ( new Key < S > ( storable , mFullComparator ) ) != null ;
public class WSectionRenderer {
    /**
     * Paints the given WSection.
     *
     * @param component the WSection to paint.
     * @param renderContext the RenderContext to paint to.
     */
    @Override
    public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
        WSection section = (WSection) component;
        XmlStringBuilder xml = renderContext.getWriter();
        boolean renderChildren = isRenderContent(section);
        xml.appendTagOpen("ui:section");
        xml.appendAttribute("id", component.getId());
        xml.appendOptionalAttribute("class", component.getHtmlClass());
        xml.appendOptionalAttribute("track", component.isTracking(), "true");
        // LAZY sections are marked hidden until their content is rendered;
        // otherwise the component's own hidden state is used.
        if (SectionMode.LAZY.equals(section.getMode())) {
            xml.appendOptionalAttribute("hidden", !renderChildren, "true");
        } else {
            xml.appendOptionalAttribute("hidden", component.isHidden(), "true");
        }
        SectionMode mode = section.getMode();
        if (mode != null) {
            switch (mode) {
                case LAZY:
                    xml.appendAttribute("mode", "lazy");
                    break;
                case EAGER:
                    xml.appendAttribute("mode", "eager");
                    break;
                default:
                    throw new SystemException("Unknown section mode: " + section.getMode());
            }
        }
        xml.appendClose();
        // Render margin
        MarginRendererUtil.renderMargin(section, renderContext);
        if (renderChildren) {
            // Label
            section.getDecoratedLabel().paint(renderContext);
            // Content
            section.getContent().paint(renderContext);
        }
        xml.appendEndTag("ui:section");
    }
}
public class DatabaseRepresentation {
    /**
     * This method reverts the latest revision data to the requested.
     *
     * @param resourceName The name of the XML resource.
     * @param backToRevision The revision value, which has to be set as the latest.
     * @throws JaxRxException if the revert operation fails
     * @throws TTException on storage-layer errors
     */
    public void revertToRevision(final String resourceName, final long backToRevision) throws JaxRxException, TTException {
        ISession session = null;
        INodeWriteTrx wtx = null;
        // abort tracks whether the write transaction must be rolled back in
        // the finally block instead of being closed normally.
        boolean abort = false;
        try {
            session = mDatabase.getSession(new SessionConfiguration(resourceName, StandardSettings.KEY));
            wtx = new NodeWriteTrx(session, session.beginBucketWtx(), HashKind.Rolling);
            wtx.revertTo(backToRevision);
            wtx.commit();
        } catch (final TTException exce) {
            abort = true;
            // Wrapped so the REST layer sees a JAX-RX level failure.
            throw new JaxRxException(exce);
        } finally {
            // Always release the transaction and session, aborting on failure.
            WorkerHelper.closeWTX(abort, wtx, session);
        }
    }
}
public class QueryHandler {
    /**
     * Peeks the next token, returning the QUERY_STATE corresponding to it and
     * placing the readerIndex just after the token's ':'. Must be at the end
     * of the previous token.
     *
     * @param lastChunk true if this is the last chunk
     * @return the next QUERY_STATE
     */
    private byte transitionToNextToken(boolean lastChunk) {
        int endNextToken = findNextChar(responseContent, ':');
        // No ':' found yet and more data may arrive — stay in the current state.
        if (endNextToken < 0 && !lastChunk) {
            return queryParsingState;
        }
        // No ':' and this is the final chunk: if we already passed the status
        // section, the remaining sections carry no extra info.
        if (endNextToken < 0 && lastChunk && queryParsingState >= QUERY_STATE_STATUS) {
            return QUERY_STATE_NO_INFO;
        }
        byte newState;
        // Consume up to and including the ':' so the reader index ends just
        // after the token, then inspect the consumed text.
        ByteBuf peekSlice = responseContent.readSlice(endNextToken + 1);
        String peek = peekSlice.toString(CHARSET);
        if (peek.contains("\"signature\":")) {
            newState = QUERY_STATE_SIGNATURE;
        } else if (peek.endsWith("\"results\":")) {
            newState = QUERY_STATE_ROWS_DECIDE;
        } else if (peek.endsWith("\"status\":")) {
            newState = QUERY_STATE_STATUS;
        } else if (peek.endsWith("\"errors\":")) {
            newState = QUERY_STATE_ERROR;
        } else if (peek.endsWith("\"warnings\":")) {
            newState = QUERY_STATE_WARNING;
        } else if (peek.endsWith("\"metrics\":")) {
            newState = QUERY_STATE_INFO;
        } else {
            if (lastChunk) {
                // Unrecognized token on the final chunk: the response is
                // malformed — fail loudly, optionally tracing the raw content.
                IllegalStateException e = new IllegalStateException("Error parsing query response (in TRANSITION) at \"" + peek + "\", enable trace to see response content");
                if (LOGGER.isTraceEnabled()) {
                    LOGGER.trace(responseContent.toString(CHARSET), e);
                }
                throw e;
            } else {
                // we need more data
                return queryParsingState;
            }
        }
        // Entering a new section: reset the per-section completion flag.
        sectionDone = false;
        return newState;
    }
}
public class EmptyCheckpointManager {
    /**
     * Records the checkpoint token for the given start token in the in-memory
     * map.
     * <p>
     * NOTE(review): the original doc said "Do nothing since checkpoints aren't
     * being persisted", but the code clearly stores into {@code tokenMap}.
     * Presumably "not persisted" means not written to durable storage beyond
     * this object's lifetime — confirm the intended semantics.
     */
    @Override
    public void trackCheckpoint(String startToken, String checkpointToken) {
        tokenMap.put(startToken, checkpointToken);
    }
}
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertFNCXUnitBaseToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class BackupServerApp { /** * Flush the buffer . * @ param bSendFakeTrx Send a fake trx before flushing ( This guarantees activity on the stream before the flush [ In case it just autoflushed the entire buffer ] ) */ public synchronized void flush ( boolean bSendFakeTrx ) { } }
try { if ( bSendFakeTrx ) this . getWriter ( ) . writeObject ( FAKE_TRX ) ; this . getWriter ( ) . flush ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; }
public class Error { /** * Thrown if the class isn ' t mapped . * @ param aClass class to analyze */ public static void classNotMapped ( Class < ? > aClass ) { } }
throw new ClassNotMappedException ( MSG . INSTANCE . message ( classNotMappedException1 , aClass . getSimpleName ( ) ) ) ;
public class ChainingCasProtocolValidationSpecification { /** * Add policies . * @ param policies the policies */ public void addSpecifications ( final CasProtocolValidationSpecification ... policies ) { } }
this . specifications . addAll ( Arrays . stream ( policies ) . collect ( Collectors . toList ( ) ) ) ;
public class ApplicationTenancyRepository { /** * region > findByNameOrPathMatching */ @ Programmatic public List < ApplicationTenancy > findByNameOrPathMatchingCached ( final String search ) { } }
return queryResultsCache . execute ( new Callable < List < ApplicationTenancy > > ( ) { @ Override public List < ApplicationTenancy > call ( ) throws Exception { return findByNameOrPathMatching ( search ) ; } } , ApplicationTenancyRepository . class , "findByNameOrPathMatchingCached" , search ) ;
public class DateUtils { /** * Convert an Object to a Time . */ public static java . sql . Time toTime ( Object value ) throws ParseException { } }
if ( value == null ) { return null ; } if ( value instanceof java . sql . Time ) { return ( java . sql . Time ) value ; } if ( value instanceof String ) { if ( "" . equals ( ( String ) value ) ) { return null ; } return new java . sql . Time ( IN_TIME_FORMAT . parse ( ( String ) value ) . getTime ( ) ) ; } return new java . sql . Time ( IN_TIME_FORMAT . parse ( value . toString ( ) ) . getTime ( ) ) ;
public class RpcStageEventDispatcher {
    /**
     * Fires a single-stage notification for the given ETL event data,
     * dispatching either in-process or over RPC to the next node.
     * (Comments translated from Chinese.)
     */
    public boolean single(StageType stage, EtlEventData eventData) {
        Assert.notNull(eventData);
        // Stamp the event with the current node id before dispatching.
        eventData.setCurrNid(ArbitrateConfigUtils.getCurrentNid());
        StageSingleEvent event = new StageSingleEvent(ArbitrateEventType.stageSingle);
        event.setPipelineId(eventData.getPipelineId());
        event.setStage(stage);
        event.setData(eventData);
        if (isLocal(eventData.getNextNid())) {
            // The next node runs in this JVM — handle directly.
            return onStageSingle(event);
        } else {
            // Notify the next node over RPC.
            return (Boolean) arbitrateCommmunicationClient.call(eventData.getNextNid(), event);
        }
    }
}
public class Util {
    /**
     * Trims a String, ensuring that the maximum length isn't exceeded.
     * Strings longer than {@code maxMessageLength} are cut and suffixed with
     * {@code "..."} so the result is exactly {@code maxMessageLength} long.
     *
     * @param string string to trim (may be null)
     * @param maxMessageLength maximum length of the string
     * @return trimmed string, or null when {@code string} is null
     */
    public static String trimString(String string, int maxMessageLength) {
        if (string == null) {
            return null;
        }
        if (string.length() <= maxMessageLength) {
            return string;
        }
        // Guard: with a limit of 3 or less there is no room for the "..."
        // suffix; plain truncation avoids the negative substring index the
        // previous version produced (substring(0, maxMessageLength - 3)).
        if (maxMessageLength <= 3) {
            return string.substring(0, Math.max(0, maxMessageLength));
        }
        // CHECKSTYLE.OFF: MagicNumber
        return string.substring(0, maxMessageLength - 3) + "...";
        // CHECKSTYLE.ON: MagicNumber
    }
}
public class KunderaQueryUtils { /** * On logical expression . * @ param expression * the expression * @ param m * the m * @ param idColumn * the id column * @ return the filter */ public static void onLogicalExpression ( Expression expression , EntityMetadata m , KunderaMetadata kunderaMetadata , KunderaQuery kunderaQuery ) { } }
if ( expression instanceof OrExpression ) { kunderaQuery . addFilterClause ( "(" ) ; } traverse ( ( ( LogicalExpression ) expression ) . getLeftExpression ( ) , m , kunderaMetadata , kunderaQuery , false ) ; if ( expression instanceof OrExpression ) { kunderaQuery . addFilterClause ( ")" ) ; } kunderaQuery . addFilterClause ( ( ( LogicalExpression ) expression ) . getIdentifier ( ) ) ; if ( expression instanceof OrExpression ) { kunderaQuery . addFilterClause ( "(" ) ; } traverse ( ( ( LogicalExpression ) expression ) . getRightExpression ( ) , m , kunderaMetadata , kunderaQuery , false ) ; if ( expression instanceof OrExpression ) { kunderaQuery . addFilterClause ( ")" ) ; }
public class MapLocale { /** * Passing in a Locale , you are able to receive the { @ link MapLocale } object which it is currently * paired with as a fallback . If this returns null , there was no matching { @ link MapLocale } to go along with the * passed in Locale . If you expected a non - null result , you should make sure you used * { @ link # addMapLocale ( Locale , MapLocale ) } before making this call . * @ param locale the locale which you ' d like to receive its matching { @ link MapLocale } ( fallback ) if one exists * @ return the matching { @ link MapLocale } if one exists , otherwise null * @ since 0.1.0 */ @ Nullable private static MapLocale getMapLocaleFallback ( @ NonNull Locale locale ) { } }
String fallbackCode = locale . getLanguage ( ) . substring ( 0 , 2 ) ; MapLocale foundMapLocale = null ; for ( Locale possibleLocale : LOCALE_SET . keySet ( ) ) { if ( possibleLocale . getLanguage ( ) . equals ( fallbackCode ) ) { foundMapLocale = LOCALE_SET . get ( possibleLocale ) ; break ; } } return foundMapLocale ;
public class ContextInitializer {
    /**
     * Clears meta-data collections gathered during initialization so their
     * contents can be garbage-collected.
     */
    private void destroyInitializer() {
        // Collections are cleared (not nulled out), so the fields themselves
        // remain valid references.
        GdxMaps.clearAll(fieldProcessors, methodProcessors, typeProcessors);
        GdxArrays.clearAll(scannedMetaAnnotations, scannedAnnotations, processors, delayedConstructions, manuallyAddedComponents, manuallyAddedProcessors);
    }
}
public class ConnectionsInner { /** * Retrieve a list of connections . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; ConnectionInner & gt ; object */ public Observable < Page < ConnectionInner > > listByAutomationAccountAsync ( final String resourceGroupName , final String automationAccountName ) { } }
return listByAutomationAccountWithServiceResponseAsync ( resourceGroupName , automationAccountName ) . map ( new Func1 < ServiceResponse < Page < ConnectionInner > > , Page < ConnectionInner > > ( ) { @ Override public Page < ConnectionInner > call ( ServiceResponse < Page < ConnectionInner > > response ) { return response . body ( ) ; } } ) ;
public class Gen {
    /**
     * Generates the SQL file for the given DAO.
     * (Javadoc translated from Japanese.)
     *
     * @param daoDesc the DAO description
     * @param sqlDesc the SQL description
     */
    protected void generateSql(DaoDesc daoDesc, SqlDesc sqlDesc) {
        // Target file path is derived from the configured destination dir,
        // the DAO's qualified name, and the SQL file name.
        File sqlFile = FileUtil.createSqlDir(sqlConfig.getDestDir(), daoDesc.getQualifiedName(), sqlDesc.getFileName());
        // Template-driven generation; output is UTF-8 and overwrite behavior
        // follows the SQL generation config.
        GenerationContext context = new GenerationContext(sqlDesc, sqlFile, sqlDesc.getTemplateName(), "UTF-8", sqlConfig.isOverwrite());
        generator.generate(context);
    }
}
public class TcpConnectionManager { /** * Writes the data passed in the event . * The end of record flag is used to determine if a channel is * eligible for purging . If the flag is set and all output has * been processed , the channel is purgeable until input is * received or another output event causes the state to be * reevaluated . * @ param event the event * @ param channel the channel * @ throws InterruptedException the interrupted exception */ @ Handler public void onOutput ( Output < ByteBuffer > event , TcpChannelImpl channel ) throws InterruptedException { } }
if ( channels . contains ( channel ) ) { channel . write ( event ) ; }
public class ZoneOffsetTransitionRule { /** * Obtains an instance defining the yearly rule to create transitions between two offsets . * Applications should normally obtain an instance from { @ link ZoneRules } . * This factory is only intended for use when creating { @ link ZoneRules } . * @ param month the month of the month - day of the first day of the cutover week , not null * @ param dayOfMonthIndicator the day of the month - day of the cutover week , positive if the week is that * day or later , negative if the week is that day or earlier , counting from the last day of the month , * from - 28 to 31 excluding 0 * @ param dayOfWeek the required day - of - week , null if the month - day should not be changed * @ param time the cutover time in the ' before ' offset , not null * @ param timeEndOfDay whether the time is midnight at the end of day * @ param timeDefnition how to interpret the cutover * @ param standardOffset the standard offset in force at the cutover , not null * @ param offsetBefore the offset before the cutover , not null * @ param offsetAfter the offset after the cutover , not null * @ return the rule , not null * @ throws IllegalArgumentException if the day of month indicator is invalid * @ throws IllegalArgumentException if the end of day flag is true when the time is not midnight */ public static ZoneOffsetTransitionRule of ( Month month , int dayOfMonthIndicator , DayOfWeek dayOfWeek , LocalTime time , boolean timeEndOfDay , TimeDefinition timeDefnition , ZoneOffset standardOffset , ZoneOffset offsetBefore , ZoneOffset offsetAfter ) { } }
Objects . requireNonNull ( month , "month" ) ; Objects . requireNonNull ( time , "time" ) ; Objects . requireNonNull ( timeDefnition , "timeDefnition" ) ; Objects . requireNonNull ( standardOffset , "standardOffset" ) ; Objects . requireNonNull ( offsetBefore , "offsetBefore" ) ; Objects . requireNonNull ( offsetAfter , "offsetAfter" ) ; if ( dayOfMonthIndicator < - 28 || dayOfMonthIndicator > 31 || dayOfMonthIndicator == 0 ) { throw new IllegalArgumentException ( "Day of month indicator must be between -28 and 31 inclusive excluding zero" ) ; } if ( timeEndOfDay && time . equals ( LocalTime . MIDNIGHT ) == false ) { throw new IllegalArgumentException ( "Time must be midnight when end of day flag is true" ) ; } return new ZoneOffsetTransitionRule ( month , dayOfMonthIndicator , dayOfWeek , time , timeEndOfDay , timeDefnition , standardOffset , offsetBefore , offsetAfter ) ;
public class JSEventMap { /** * Set a handler for the given JS event . If an existing handler is present , it * is automatically overridden . * @ param eJSEvent * The JS event . May not be < code > null < / code > . * @ param aNewHandler * The new handler to be added . May not be < code > null < / code > . */ public void setHandler ( @ Nonnull final EJSEvent eJSEvent , @ Nonnull final IHasJSCode aNewHandler ) { } }
ValueEnforcer . notNull ( eJSEvent , "JSEvent" ) ; ValueEnforcer . notNull ( aNewHandler , "NewHandler" ) ; // Set only the new handler and remove any existing handler m_aEvents . put ( eJSEvent , new CollectingJSCodeProvider ( ) . appendFlattened ( aNewHandler ) ) ;
public class SQLExpressions { /** * Create a dateadd ( unit , date , amount ) expression * @ param unit date part * @ param date date * @ param amount amount * @ return converted date */ public static < D extends Comparable > DateTimeExpression < D > dateadd ( DatePart unit , DateTimeExpression < D > date , int amount ) { } }
return Expressions . dateTimeOperation ( date . getType ( ) , DATE_ADD_OPS . get ( unit ) , date , ConstantImpl . create ( amount ) ) ;
public class CmsListMetadata { /** * / * package */ void checkIds ( ) { } }
Set < String > ids = new TreeSet < String > ( ) ; // indep actions Iterator < I_CmsListAction > itIndepActions = getIndependentActions ( ) . iterator ( ) ; while ( itIndepActions . hasNext ( ) ) { String id = itIndepActions . next ( ) . getId ( ) ; if ( ids . contains ( id ) ) { throw new CmsIllegalStateException ( Messages . get ( ) . container ( Messages . ERR_DUPLICATED_ID_1 , id ) ) ; } ids . add ( id ) ; } // multi actions Iterator < CmsListMultiAction > itMultiActions = getMultiActions ( ) . iterator ( ) ; while ( itMultiActions . hasNext ( ) ) { String id = itMultiActions . next ( ) . getId ( ) ; if ( ids . contains ( id ) ) { throw new CmsIllegalStateException ( Messages . get ( ) . container ( Messages . ERR_DUPLICATED_ID_1 , id ) ) ; } ids . add ( id ) ; } // details Iterator < CmsListItemDetails > itItemDetails = getItemDetailDefinitions ( ) . iterator ( ) ; while ( itItemDetails . hasNext ( ) ) { String id = itItemDetails . next ( ) . getId ( ) ; if ( ids . contains ( id ) ) { throw new CmsIllegalStateException ( Messages . get ( ) . container ( Messages . ERR_DUPLICATED_ID_1 , id ) ) ; } ids . add ( id ) ; } // columns Iterator < CmsListColumnDefinition > itColumns = getColumnDefinitions ( ) . iterator ( ) ; while ( itColumns . hasNext ( ) ) { CmsListColumnDefinition col = itColumns . next ( ) ; if ( ids . contains ( col . getId ( ) ) ) { throw new CmsIllegalStateException ( Messages . get ( ) . container ( Messages . ERR_DUPLICATED_ID_1 , col . getId ( ) ) ) ; } ids . add ( col . getId ( ) ) ; // default actions Iterator < CmsListDefaultAction > itDefaultActions = col . getDefaultActions ( ) . iterator ( ) ; while ( itDefaultActions . hasNext ( ) ) { CmsListDefaultAction action = itDefaultActions . next ( ) ; if ( ids . contains ( action . getId ( ) ) ) { throw new CmsIllegalStateException ( Messages . get ( ) . container ( Messages . ERR_DUPLICATED_ID_1 , action . getId ( ) ) ) ; } ids . add ( action . 
getId ( ) ) ; } // direct actions Iterator < I_CmsListDirectAction > itDirectActions = col . getDirectActions ( ) . iterator ( ) ; while ( itDirectActions . hasNext ( ) ) { I_CmsListDirectAction action = itDirectActions . next ( ) ; if ( ids . contains ( action . getId ( ) ) ) { throw new CmsIllegalStateException ( Messages . get ( ) . container ( Messages . ERR_DUPLICATED_ID_1 , action . getId ( ) ) ) ; } ids . add ( action . getId ( ) ) ; } }
public class SqlHelper { /** * Returns the name of the class to be instantiated . * @ param rs the Resultset * @ return null if the column is not available */ public static String getOjbClassName ( ResultSet rs ) { } }
try { return rs . getString ( OJB_CLASS_COLUMN ) ; } catch ( SQLException e ) { return null ; }
public class HttpSessionsParam { /** * Adds the default session token with the given name and enabled state . * @ param name the name of the session token . * @ param enabled { @ code true } if should be enabled , { @ code false } otherwise . * @ return { @ code true } if the token did not exist , { @ code false } otherwise . * @ since TODO add version */ public boolean addDefaultToken ( String name , boolean enabled ) { } }
String normalisedName = getNormalisedSessionTokenName ( name ) ; if ( ! getDefaultToken ( normalisedName ) . isPresent ( ) ) { defaultTokens . add ( new HttpSessionToken ( normalisedName , enabled ) ) ; if ( enabled ) { defaultTokensEnabled . add ( normalisedName ) ; } saveDefaultTokens ( ) ; return true ; } return false ;
public class QueryToolChest { /** * Generally speaking this is the exact same thing as makePreComputeManipulatorFn . It is leveraged in * order to compute PostAggregators on results after they have been completely merged together , which * should actually be done in the mergeResults ( ) call instead of here . * This should never actually be overridden and it should be removed as quickly as possible . * @ param query The Query that is currently being processed * @ param fn The function that should be applied to all metrics in the results * @ return A function that will apply the provided fn to all metrics in the input ResultType object */ public Function < ResultType , ResultType > makePostComputeManipulatorFn ( QueryType query , MetricManipulationFn fn ) { } }
return makePreComputeManipulatorFn ( query , fn ) ;
public class CmsEntity { /** * Removes a specific attribute value . < p > * @ param attributeName the attribute name * @ param index the value index */ public void removeAttributeValue ( String attributeName , int index ) { } }
if ( m_simpleAttributes . containsKey ( attributeName ) ) { List < String > values = m_simpleAttributes . get ( attributeName ) ; if ( ( values . size ( ) == 1 ) && ( index == 0 ) ) { removeAttributeSilent ( attributeName ) ; } else { values . remove ( index ) ; } } else if ( m_entityAttributes . containsKey ( attributeName ) ) { List < CmsEntity > values = m_entityAttributes . get ( attributeName ) ; if ( ( values . size ( ) == 1 ) && ( index == 0 ) ) { removeAttributeSilent ( attributeName ) ; } else { CmsEntity child = values . remove ( index ) ; removeChildChangeHandler ( child ) ; } } fireChange ( ) ;
public class CommandLine { /** * Add a command line switch . This method is for adding options that do not * require an argument . * @ param option * the option , must start with " - " * @ param description * single line description of the option */ public void addSwitch ( String option , String description ) { } }
optionList . add ( option ) ; optionDescriptionMap . put ( option , description ) ; if ( option . length ( ) > maxWidth ) { maxWidth = option . length ( ) ; }
public class Functions { /** * Runs decode base 64 function with arguments . * @ return */ public static String decodeBase64 ( String content , TestContext context ) { } }
return new DecodeBase64Function ( ) . execute ( Collections . singletonList ( content ) , context ) ;
public class DenyAssignmentsInner {
    /**
     * Gets deny assignments for a resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param resourceProviderNamespace The namespace of the resource provider.
     * @param parentResourcePath The parent resource identity.
     * @param resourceType The resource type of the resource.
     * @param resourceName The name of the resource to get deny assignments for.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<DenyAssignmentInner>> listForResourceAsync(final String resourceGroupName, final String resourceProviderNamespace, final String parentResourcePath, final String resourceType, final String resourceName, final ListOperationCallback<DenyAssignmentInner> serviceCallback) {
        // Fetch the first page, then let AzureServiceFuture drive paging by
        // resolving each nextPageLink through listForResourceNextSinglePageAsync.
        return AzureServiceFuture.fromPageResponse(
            listForResourceSinglePageAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName),
            new Func1<String, Observable<ServiceResponse<Page<DenyAssignmentInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<DenyAssignmentInner>>> call(String nextPageLink) {
                    // Each subsequent page is fetched lazily as results are consumed.
                    return listForResourceNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
}
public class BoxApiFolder { /** * Gets a request that renames a folder * @ param id id of folder to rename * @ param newName id of folder to retrieve info on * @ return request to rename a folder */ public BoxRequestsFolder . UpdateFolder getRenameRequest ( String id , String newName ) { } }
BoxRequestsFolder . UpdateFolder request = new BoxRequestsFolder . UpdateFolder ( id , getFolderInfoUrl ( id ) , mSession ) . setName ( newName ) ; return request ;
public class FileDescriptorLimit { /** * * Using OperatingSystemMXBean , we can obtain the total number of open file descriptors . */ @ IgnoreJRERequirement // UnixOperatingSystemMXBean private static void getOpenFileDescriptorCount ( PrintWriter writer ) throws UnsupportedEncodingException { } }
try { OperatingSystemMXBean operatingSystemMXBean = ManagementFactory . getOperatingSystemMXBean ( ) ; if ( operatingSystemMXBean instanceof UnixOperatingSystemMXBean ) { UnixOperatingSystemMXBean unixOperatingSystemMXBean = ( UnixOperatingSystemMXBean ) operatingSystemMXBean ; writer . println ( "Open File Descriptor Count: " + unixOperatingSystemMXBean . getOpenFileDescriptorCount ( ) ) ; } else { writer . println ( "Wrong bean: " + operatingSystemMXBean ) ; } } catch ( LinkageError e ) { writer . println ( "Unable to get the total number of open file descriptors using OperatingSystemMXBean" ) ; }
public class DailyTimeIntervalTrigger {
    /**
     * Called by the scheduler at the time a <code>Trigger</code> is first added
     * to the scheduler, in order to have the <code>Trigger</code> compute its
     * first fire time, based on any associated calendar.
     * After this method has been called, <code>getNextFireTime()</code> should
     * return a valid answer.
     *
     * @return the first time at which the <code>Trigger</code> will be fired by
     *         the scheduler (also the value <code>getNextFireTime()</code> will
     *         return until after the first firing), or <code>null</code> if no
     *         fire time exists within the supported year range
     */
    @Override
    public Date computeFirstFireTime(final ICalendar calendar) {
        // Start one second before the trigger's start time so a fire time
        // exactly at the start time is included.
        m_aNextFireTime = getFireTimeAfter(new Date(getStartTime().getTime() - 1000L));
        // Check calendar for date - time exclusion
        while (m_aNextFireTime != null && calendar != null && !calendar.isTimeIncluded(m_aNextFireTime.getTime())) {
            m_aNextFireTime = getFireTimeAfter(m_aNextFireTime);
            if (m_aNextFireTime == null)
                break; // avoid infinite loop
            // Give up once the candidate time is beyond the supported year range.
            final Calendar c = PDTFactory.createCalendar();
            c.setTime(m_aNextFireTime);
            if (c.get(Calendar.YEAR) > CQuartz.MAX_YEAR) {
                return null;
            }
        }
        return m_aNextFireTime;
    }
}
public class SrvOrmAndroid {
    /**
     * <p>Insert entity into DB.
     * For autogenerated ID fill it in the entity.</p>
     *
     * @param <T> entity type
     * @param pAddParam additional param
     * @param pEntity entity
     * @throws Exception - an exception
     */
    @Override
    public final <T> void insertEntity(final Map<String, Object> pAddParam, final T pEntity) throws Exception {
        ColumnsValues columnsValues = evalColumnsValues(pAddParam, pEntity);
        // The "old version" optimistic-lock column must never be part of an insert.
        columnsValues.getLongsMap().remove(ISrvOrm.VERSIONOLD_NAME);
        // NOTE(review): toUpperCase() uses the default locale - verify table
        // naming is stable on locales with non-standard casing (e.g. Turkish).
        Long result = getSrvDatabase().executeInsert(pEntity.getClass().getSimpleName().toUpperCase(), columnsValues);
        String[] idName = columnsValues.getIdColumnsNames();
        if (idName.length == 1) { // if non - composite PK
            Field fieldId = getUtlReflection().retrieveField(pEntity.getClass(), idName[0]);
            fieldId.setAccessible(true);
            Object idValue = fieldId.get(pEntity);
            if (idValue == null) { // It must be an autogenerated Integer or Long ID
                // Write the DB-generated key back into the entity's ID field.
                if (fieldId.getType() == Long.class) {
                    fieldId.set(pEntity, result);
                } else if (fieldId.getType() == Integer.class) {
                    fieldId.set(pEntity, result.intValue());
                } else {
                    String msg = "There is no rule to fill ID " + fieldId.getName() + " of " + fieldId.getType() + " in " + pEntity;
                    throw new ExceptionWithCode(ExceptionWithCode.NOT_YET_IMPLEMENTED, msg);
                }
            }
        }
    }
}
public class XPathParser { /** * Parses the the rule TreatExpr according to the following production rule : * [ 17 ] TreatExpr : : = CastableExpr ( < " treat " " as " > SequenceType ) ? . * @ throws TTXPathException */ private void parseTreatExpr ( ) throws TTXPathException { } }
parseCastableExpr ( ) ; if ( is ( "treat" , true ) ) { consume ( "as" , true ) ; mPipeBuilder . addTreatExpr ( getTransaction ( ) , parseSequenceType ( ) ) ; }
public class Function {
    /**
     * Creates a node representing a binary expression tree from the given formula.
     *
     * @param text is the right-hand side of a mathematical equation expressed in
     *        infix notation
     * @return a node representing a binary expression tree from the given formula
     * @throws RuntimeException if the formula has syntax errors
     */
    public Node parse(String text) {
        if (text.isEmpty()) {
            return null;
        }
        // Convert infix to postfix, then build the tree with a stack of nodes.
        String postfix = toPostfix(text);
        Deque<Node> stack = new ArrayDeque<Node>();
        StringTokenizer tokenizer = new StringTokenizer(postfix);
        String token;
        FunctionFactory factory = FactoryManager.instance().function();
        while (tokenizer.hasMoreTokens()) {
            token = tokenizer.nextToken();
            Element element = factory.getObject(token);
            // Anything that is not a registered operator/function and not
            // punctuation is treated as an operand (number or variable).
            boolean isOperand = element == null && !"(".equals(token) && !")".equals(token) && !",".equals(token);
            if (element != null) {
                if (element.getArity() > stack.size()) {
                    throw new RuntimeException(String.format("[function error] " + "operator <%s> has arity <%d>, " + "but <%d> elements are available: (%s)", element.getName(), element.getArity(), stack.size(), Op.join(stack, ", ")));
                }
                Node node;
                try {
                    // Clone so the factory prototype is never mutated.
                    node = new Node(element.clone());
                } catch (Exception ex) {
                    throw new RuntimeException(ex);
                }
                // NOTE(review): the first pop is assigned to .left and the
                // second to .right; confirm this matches the operand order the
                // Node evaluator expects for non-commutative binary operators.
                node.left = stack.pop();
                if (element.getArity() == 2) {
                    node.right = stack.pop();
                }
                stack.push(node);
            } else if (isOperand) {
                Node node;
                try {
                    double value = Op.toDouble(token);
                    node = new Node(value);
                } catch (Exception ex) {
                    // Not numeric: treat the token as a variable name.
                    node = new Node(token);
                }
                stack.push(node);
            }
        }
        // A well-formed formula reduces to exactly one root node.
        if (stack.size() != 1) {
            throw new RuntimeException(String.format("[function error] ill-formed formula <%s> due to: <%s>", text, Op.join(stack, ";")));
        }
        return stack.pop();
    }
}
public class InternalXbaseParser {
    /**
     * InternalXbase.g:1017:1: ruleXWhileExpression : ( ( rule__XWhileExpression__Group__0 ) ) ;
     *
     * <p>ANTLR-generated rule method - do not edit by hand; regenerate from the
     * grammar instead.</p>
     */
    public final void ruleXWhileExpression() throws RecognitionException {
        int stackSize = keepStackSize();
        try {
            // InternalXbase.g:1021:2: ( ( ( rule__XWhileExpression__Group__0 ) ) )
            // InternalXbase.g:1022:2: ( ( rule__XWhileExpression__Group__0 ) )
            {
                // InternalXbase.g:1022:2: ( ( rule__XWhileExpression__Group__0 ) )
                // InternalXbase.g:1023:3: ( rule__XWhileExpression__Group__0 )
                {
                    if (state.backtracking == 0) {
                        before(grammarAccess.getXWhileExpressionAccess().getGroup());
                    }
                    // InternalXbase.g:1024:3: ( rule__XWhileExpression__Group__0 )
                    // InternalXbase.g:1024:4: rule__XWhileExpression__Group__0
                    {
                        pushFollow(FOLLOW_2);
                        rule__XWhileExpression__Group__0();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    if (state.backtracking == 0) {
                        after(grammarAccess.getXWhileExpressionAccess().getGroup());
                    }
                }
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            restoreStackSize(stackSize);
        }
        return;
    }
}
public class Quantile { /** * Compute the correct final quantile from these 4 values . If the lo and hi * elements are equal , use them . However if they differ , then there is no * single value which exactly matches the desired quantile . There are * several well - accepted definitions in this case - including picking either * the lo or the hi , or averaging them , or doing a linear interpolation . * @ param lo the highest element less than or equal to the desired quantile * @ param hi the lowest element greater than or equal to the desired quantile * @ param row row number ( zero based ) of the lo element ; high element is + 1 * @ return desired quantile . */ static double computeQuantile ( double lo , double hi , double row , double nrows , double prob , QuantileModel . CombineMethod method ) { } }
if ( lo == hi ) return lo ; // Equal ; pick either if ( method == null ) method = QuantileModel . CombineMethod . INTERPOLATE ; switch ( method ) { case INTERPOLATE : return linearInterpolate ( lo , hi , row , nrows , prob ) ; case AVERAGE : return 0.5 * ( hi + lo ) ; case LOW : return lo ; case HIGH : return hi ; default : Log . info ( "Unknown even sample size quantile combination type: " + method + ". Doing linear interpolation." ) ; return linearInterpolate ( lo , hi , row , nrows , prob ) ; }
public class ReserveListenerPortMojo {
    /**
     * Returns the first port number, starting at portNumberStartingPoint, that is
     * not already present in the reservedPorts list.
     *
     * @param portNumberStartingPoint first port number to try; must not be null
     * @param reservedPorts the ports already reserved
     * @return the first free number at or after the starting point
     */
    private int findAvailablePortNumber(Integer portNumberStartingPoint, List<Integer> reservedPorts) {
        assert portNumberStartingPoint != null;
        // Walk upwards until a number is found that is not reserved.
        int port = portNumberStartingPoint;
        for (; reservedPorts.contains(port); port++) {
            // keep scanning
        }
        return port;
    }
}
public class DOInfoReader { /** * 获得注解了 @ JoinRightTable的字段 , 如果没有注解 , 抛出NoJoinTableMemberException * @ param clazz * @ return */ public static Field getJoinRightTable ( Class < ? > clazz ) { } }
if ( clazz == null ) { throw new NoJoinTableMemberException ( "clazz is null" ) ; } List < Field > result = _getAnnotationColumns ( clazz , JoinRightTable . class ) ; if ( result == null || result . isEmpty ( ) ) { throw new NoJoinTableMemberException ( "class " + clazz . getName ( ) + " does not have @JoinRightTable field" ) ; } return result . get ( 0 ) ;
public class RottenTomatoesApi { /** * Retrieves the reviews for a movie * @ param movieId * @ param reviewType * @ param country * @ return * @ throws RottenTomatoesException */ public List < Review > getMoviesReviews ( int movieId , String reviewType , String country ) throws RottenTomatoesException { } }
return getMoviesReviews ( movieId , reviewType , DEFAULT_PAGE_LIMIT , DEFAULT_PAGE , country ) ;
public class DecodeHandler {
    /**
     * Decode the data within the viewfinder rectangle, and time how long it took. For efficiency,
     * reuse the same reader objects from one decode to the next.
     *
     * @param data The YUV preview frame.
     * @param width The width of the preview frame.
     * @param height The height of the preview frame.
     */
    private void decode(byte[] data, int width, int height) {
        long start = System.nanoTime();
        Result rawResult = null;
        PlanarYUVLuminanceSource source = activity.getCameraManager().buildLuminanceSource(data, width, height);
        if (source != null) {
            BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
            try {
                // Reuses the reader's configured state between frames.
                rawResult = multiFormatReader.decodeWithState(bitmap);
            } catch (ReaderException re) {
                // continue - no barcode in this frame
            } finally {
                // Reset so decode state does not leak into the next frame.
                multiFormatReader.reset();
            }
        }
        Handler handler = activity.getHandler();
        if (rawResult != null) {
            // Don't log the barcode contents for security.
            long end = System.nanoTime();
            Log.d(TAG, "Found barcode in " + TimeUnit.NANOSECONDS.toMillis(end - start) + " ms");
            if (handler != null) {
                // Deliver the result plus a thumbnail of the frame to the UI thread.
                Message message = Message.obtain(handler, R.id.decode_succeeded, rawResult);
                Bundle bundle = new Bundle();
                bundleThumbnail(source, bundle);
                message.setData(bundle);
                message.sendToTarget();
            }
        } else {
            if (handler != null) {
                Message message = Message.obtain(handler, R.id.decode_failed);
                message.sendToTarget();
            }
        }
    }
}
public class RelationalOperations { /** * Returns true if the relation holds . */ private static boolean polygonRelatePolygon_ ( Polygon polygon_a , Polygon polygon_b , double tolerance , int relation , ProgressTracker progress_tracker ) { } }
switch ( relation ) { case Relation . disjoint : return polygonDisjointPolygon_ ( polygon_a , polygon_b , tolerance , progress_tracker ) ; case Relation . within : return polygonContainsPolygon_ ( polygon_b , polygon_a , tolerance , progress_tracker ) ; case Relation . contains : return polygonContainsPolygon_ ( polygon_a , polygon_b , tolerance , progress_tracker ) ; case Relation . equals : return polygonEqualsPolygon_ ( polygon_a , polygon_b , tolerance , progress_tracker ) ; case Relation . touches : return polygonTouchesPolygon_ ( polygon_a , polygon_b , tolerance , progress_tracker ) ; case Relation . overlaps : return polygonOverlapsPolygon_ ( polygon_a , polygon_b , tolerance , progress_tracker ) ; default : break ; // warning fix } return false ;
public class PropertyAnnotation { /** * Return a sorted comma - separated list of the property attributes for this annotation . */ public static String toAttributeString ( Set < String > attributes ) { } }
List < String > list = new ArrayList < String > ( attributes ) ; Collections . sort ( list , ATTRIBUTES_COMPARATOR ) ; return Joiner . on ( ", " ) . join ( list ) ;
public class Meta { /** * Convenience method which adds a tag * @ param theSystem The code system * @ param theCode The code * @ param theDisplay The display name * @ return Returns a reference to < code > this < / code > for easy chaining */ public Meta addTag ( String theSystem , String theCode , String theDisplay ) { } }
addTag ( ) . setSystem ( theSystem ) . setCode ( theCode ) . setDisplay ( theDisplay ) ; return this ;
public class HttpOutputStream {
    /**
     * Writes up to {@code len} bytes from the given buffer to the underlying
     * output, honouring the configured content length and null-output state.
     *
     * @param b the source buffer
     * @param off offset of the first byte to write
     * @param len number of bytes requested; may be reduced by prepareOutput
     * @throws IOException if the underlying stream fails
     */
    public void write(byte b[], int off, int len) throws IOException {
        // prepareOutput may clamp len (presumably to the remaining content
        // length - confirm against its implementation) and updates stream state.
        len = prepareOutput(len);
        // When output is nulled, bytes are accounted for but discarded.
        if (!_nulled)
            _out.write(b, off, len);
        // Flush once the full declared content length has been written.
        if (_bytes == _contentLength)
            flush();
    }
}
public class ModelsImpl {
    /**
     * Updates the name of an entity extractor.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param entityId The entity extractor ID.
     * @param name The entity's new name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the OperationStatus object
     */
    public Observable<ServiceResponse<OperationStatus>> updateEntityWithServiceResponseAsync(UUID appId, String versionId, UUID entityId, String name) {
        // Validate all required parameters before issuing the request.
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        if (appId == null) {
            throw new IllegalArgumentException("Parameter appId is required and cannot be null.");
        }
        if (versionId == null) {
            throw new IllegalArgumentException("Parameter versionId is required and cannot be null.");
        }
        if (entityId == null) {
            throw new IllegalArgumentException("Parameter entityId is required and cannot be null.");
        }
        // The request body carries only the new name. NOTE(review): "name" is
        // not null-checked here - presumably validated server-side; confirm.
        ModelUpdateObject modelUpdateObject = new ModelUpdateObject();
        modelUpdateObject.withName(name);
        String parameterizedHost = Joiner.on(", ").join("{Endpoint}", this.client.endpoint());
        return service.updateEntity(appId, versionId, entityId, this.client.acceptLanguage(), modelUpdateObject, parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<OperationStatus>>>() {
                @Override
                public Observable<ServiceResponse<OperationStatus>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize and validate the raw HTTP response.
                        ServiceResponse<OperationStatus> clientResponse = updateEntityDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class SrvBalanceStd {
    /**
     * <p>Retrieve Trial Balance.</p>
     *
     * @param pAddParam additional param
     * @param pDate date
     * @return balance lines
     * @throws Exception - an exception
     */
    @Override
    public final synchronized List<TrialBalanceLine> retrieveTrialBalance(final Map<String, Object> pAddParam, final Date pDate) throws Exception {
        // Make sure stored balances are up to date before querying.
        recalculateAllIfNeed(pAddParam, pDate);
        List<TrialBalanceLine> result = new ArrayList<TrialBalanceLine>();
        String query = evalQueryBalance(pAddParam, pDate);
        IRecordSet<RS> recordSet = null;
        try {
            recordSet = getSrvDatabase().retrieveRecords(query);
            if (recordSet.moveToFirst()) {
                do {
                    String accName = recordSet.getString("ITSNAME");
                    String accNumber = recordSet.getString("ITSNUMBER");
                    String subaccName = recordSet.getString("SUBACC");
                    Double debit = recordSet.getDouble("DEBIT");
                    Double credit = recordSet.getDouble("CREDIT");
                    // Skip rows that are zero on both sides.
                    if (debit != 0 || credit != 0) {
                        TrialBalanceLine tbl = new TrialBalanceLine();
                        tbl.setAccName(accName);
                        tbl.setAccNumber(accNumber);
                        tbl.setSubaccName(subaccName);
                        // Round to the configured balance precision/rounding mode.
                        tbl.setDebit(BigDecimal.valueOf(debit).setScale(getSrvAccSettings().lazyGetAccSettings(pAddParam).getBalancePrecision(), getSrvAccSettings().lazyGetAccSettings(pAddParam).getRoundingMode()));
                        tbl.setCredit(BigDecimal.valueOf(credit).setScale(getSrvAccSettings().lazyGetAccSettings(pAddParam).getBalancePrecision(), getSrvAccSettings().lazyGetAccSettings(pAddParam).getRoundingMode()));
                        // Re-check after rounding, which may have reduced a tiny value to zero.
                        if (tbl.getDebit().doubleValue() != 0 || tbl.getCredit().doubleValue() != 0) {
                            result.add(tbl);
                        }
                    }
                } while (recordSet.moveToNext());
            }
        } finally {
            if (recordSet != null) {
                recordSet.close();
            }
        }
        // account totals :
        BigDecimal debitAcc = BigDecimal.ZERO;
        BigDecimal creditAcc = BigDecimal.ZERO;
        String accCurr = null;
        int lineCurr = 0;
        int lineStartAcc = 0;
        // NOTE(review): debitAcc/creditAcc are never reset when a new account
        // starts, so later accounts receive running totals rather than
        // per-account totals, and the LAST account's lines never get their
        // totals written back at all - confirm whether this is intended.
        for (TrialBalanceLine tbl : result) {
            if (!tbl.getAccNumber().equals(accCurr)) {
                // save to old
                if (accCurr != null) {
                    for (int j = lineStartAcc; j < lineCurr; j++) {
                        result.get(j).setDebitAcc(debitAcc);
                        result.get(j).setCreditAcc(creditAcc);
                    }
                }
                // init new acc :
                lineStartAcc = lineCurr;
                accCurr = tbl.getAccNumber();
            }
            debitAcc = debitAcc.add(tbl.getDebit());
            creditAcc = creditAcc.add(tbl.getCredit());
            lineCurr++;
        }
        return result;
    }
}
public class AbstractChainableEvent { /** * { @ inheritDoc } * @ since 1.2.0 */ @ Override public ChainableEvent onSuccess ( Event onSuccessEvent , Class < ? extends IEventService > onSuccessEventService ) { } }
this . onSuccessChains . add ( new ChainLink ( ) . onSuccess ( linkChainIdentifier ( onSuccessEvent ) , onSuccessEventService ) ) ; return this ;
public class AbstractJnlpMojo { /** * Iterate through all the top level and transitive dependencies declared in the project and * collect all the runtime scope dependencies for inclusion in the . zip and signing . * @ throws MojoExecutionException if could not process dependencies */ private void processDependencies ( ) throws MojoExecutionException { } }
processDependency ( getProject ( ) . getArtifact ( ) ) ; AndArtifactFilter filter = new AndArtifactFilter ( ) ; // filter . add ( new ScopeArtifactFilter ( dependencySet . getScope ( ) ) ) ; if ( dependencies != null && dependencies . getIncludes ( ) != null && ! dependencies . getIncludes ( ) . isEmpty ( ) ) { filter . add ( new IncludesArtifactFilter ( dependencies . getIncludes ( ) ) ) ; } if ( dependencies != null && dependencies . getExcludes ( ) != null && ! dependencies . getExcludes ( ) . isEmpty ( ) ) { filter . add ( new ExcludesArtifactFilter ( dependencies . getExcludes ( ) ) ) ; } Collection < Artifact > artifacts = isExcludeTransitive ( ) ? getProject ( ) . getDependencyArtifacts ( ) : getProject ( ) . getArtifacts ( ) ; for ( Artifact artifact : artifacts ) { if ( filter . include ( artifact ) ) { processDependency ( artifact ) ; } }
public class EbeanFinder { /** * { @ inheritDoc } * Specifies a path to fetch with a specific list properties to include , to load a partial object . */ public Query < T > fetch ( String path , String fetchProperties ) { } }
return query ( ) . fetch ( path , fetchProperties ) ;
public class ListCriteria { /** * { @ inheritDoc } */ @ Override public void asSetter ( final StringBuilder sb ) { } }
sb . append ( ".setListCriteria(new String[] {" ) ; boolean firstTime = true ; for ( final E item : list ) { if ( ! firstTime ) sb . append ( "," ) ; sb . append ( "\"" ) ; sb . append ( item . toString ( ) ) ; sb . append ( "\"" ) ; firstTime = false ; } sb . append ( "})" ) ;
public class Lexer { /** * tests only */ List < Token > tokenize ( String input ) { } }
StringBuilder accumulator = new StringBuilder ( ) ; List < Token > tokens = new ArrayList < > ( ) ; input += '\0' ; for ( int i = 0 ; i < input . length ( ) ; i ++ ) { i = process ( accumulator , tokens , i , input . charAt ( i ) ) ; } return tokens ;
public class ScriptRunner { /** * Runs an SQL script ( read in using the Reader parameter ) using the * connection passed in * @ param conn - the connection to use for the script * @ param reader - the source of the script * @ throws SQLException if any SQL errors occur * @ throws IOException if there is an error reading from the Reader */ private void runScript ( Connection conn , Reader reader ) throws IOException , SQLException { } }
StringBuffer command = null ; try { LineNumberReader lineReader = new LineNumberReader ( reader ) ; String line ; while ( ( line = lineReader . readLine ( ) ) != null ) { if ( command == null ) { command = new StringBuffer ( ) ; } String trimmedLine = line . trim ( ) ; final Matcher delimMatch = delimP . matcher ( trimmedLine ) ; if ( trimmedLine . length ( ) < 1 || trimmedLine . startsWith ( "//" ) ) { // Do nothing } else if ( delimMatch . matches ( ) ) { setDelimiter ( delimMatch . group ( 2 ) , false ) ; } else if ( trimmedLine . startsWith ( "--" ) ) { logger . debug ( trimmedLine ) ; } else if ( trimmedLine . length ( ) < 1 || trimmedLine . startsWith ( "--" ) ) { // Do nothing } else if ( ! fullLineDelimiter && trimmedLine . endsWith ( getDelimiter ( ) ) || fullLineDelimiter && trimmedLine . equals ( getDelimiter ( ) ) ) { command . append ( line . substring ( 0 , line . lastIndexOf ( getDelimiter ( ) ) ) ) ; command . append ( " " ) ; this . execCommand ( conn , command , lineReader ) ; command = null ; } else { command . append ( line ) ; command . append ( "\n" ) ; } } if ( command != null && command . length ( ) > 0 ) { this . execCommand ( conn , command , lineReader ) ; } if ( ! autoCommit ) { conn . commit ( ) ; } } catch ( IOException e ) { throw new IOException ( String . format ( "Error executing '%s': %s" , command , e . getMessage ( ) ) , e ) ; } finally { conn . rollback ( ) ; }
public class GeoPackageCache { /** * Get the cached GeoPackage or open the GeoPackage file without caching it * @ param name * GeoPackage name * @ param file * GeoPackage file * @ return GeoPackage * @ since 3.1.0 */ public GeoPackage getOrNoCacheOpen ( String name , File file ) { } }
return getOrOpen ( name , file , false ) ;
public class Unpooled { /** * Creates a new 3 - byte big - endian buffer that holds the specified 24 - bit integer . */ public static ByteBuf copyMedium ( int value ) { } }
ByteBuf buf = buffer ( 3 ) ; buf . writeMedium ( value ) ; return buf ;
public class UpgradeOutputByteBufferUtil { /** * Set the amount of data to buffer internally before the stream itself * initiates a flush . A zero size means no buffer is done , each write * call will flush data . * @ param size * @ throws IllegalStateException if already writing data or closed */ private void setBufferSize ( int size ) { } }
this . amountToBuffer = size ; this . bbSize = ( 49152 < size ) ? 32768 : 8192 ; int numBuffers = ( size / this . bbSize ) ; if ( 0 == size || 0 != ( size % this . bbSize ) ) { numBuffers ++ ; } this . _output = new WsByteBuffer [ numBuffers ] ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "setBufferSize=[" + size + "]; " + this ) ; }
public class CPDefinitionOptionValueRelPersistenceImpl { /** * Returns the first cp definition option value rel in the ordered set where key = & # 63 ; . * @ param key the key * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching cp definition option value rel , or < code > null < / code > if a matching cp definition option value rel could not be found */ @ Override public CPDefinitionOptionValueRel fetchByKey_First ( String key , OrderByComparator < CPDefinitionOptionValueRel > orderByComparator ) { } }
List < CPDefinitionOptionValueRel > list = findByKey ( key , 0 , 1 , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class VBSFaxClientSpi { /** * This function will submit a new fax job . < br > * The fax job ID may be populated by this method in the provided * fax job object . * @ param faxJob * The fax job object containing the needed information */ @ Override protected void submitFaxJobImpl ( FaxJob faxJob ) { } }
String name = VBSFaxClientSpi . VBS_WIN_XP_SUBMIT_SCRIPT ; if ( this . useWin2kAPI ) { name = VBSFaxClientSpi . VBS_WIN_2K_SUBMIT_SCRIPT ; } // setup input data Object [ ] input = setupSubmitFaxJobInput ( faxJob ) ; // invoke script this . invokeScript ( faxJob , name , input , FaxActionType . SUBMIT_FAX_JOB ) ;
public class NioGroovyMethods {

    /**
     * Invokes the closure for each file whose name (file.name) matches the given nameFilter
     * in the given directory - calling the
     * {@link org.codehaus.groovy.runtime.DefaultGroovyMethods#isCase(Object, Object)} method
     * to determine if a match occurs. This method can be used with different kinds of filters
     * like regular expressions, classes, ranges etc. Both regular files and subdirectories
     * are matched.
     *
     * @param self a Path (that happens to be a folder/directory)
     * @param nameFilter the nameFilter to perform on the name of the file
     * @param closure the closure to invoke
     * @throws java.io.FileNotFoundException if the given directory does not exist
     * @throws IllegalArgumentException if the provided Path object does not represent a directory
     * @see #eachFileMatch(Path, groovy.io.FileType, Object, groovy.lang.Closure)
     * @since 2.3.0
     */
    public static void eachFileMatch(final Path self, final Object nameFilter,
            @ClosureParams(value = SimpleType.class, options = "java.nio.file.Path") final Closure closure)
            throws IOException {
        // Delegate with FileType.ANY so both regular files and subdirectories match.
        eachFileMatch(self, FileType.ANY, nameFilter, closure);
    }
}
public class CommonsAssert { /** * Like JUnit assertEquals but using { @ link EqualsHelper } . * @ param x * Fist object . May be < code > null < / code > * @ param y * Second object . May be < code > null < / code > . */ public static < T > void assertEquals ( @ Nullable final T x , @ Nullable final T y ) { } }
assertEquals ( ( String ) null , x , y ) ;
public class SubPlanAssembler {

    /**
     * Partial index optimization: Remove query expressions that exactly match the index
     * WHERE expression(s) from the access path.
     *
     * @param path - Partial Index access path; its {@code otherExprs} and
     *        {@code eliminatedPostExprs} collections are mutated in place
     * @param exprToRemove - expressions to remove
     */
    private void filterPostPredicateForPartialIndex(AccessPath path, List<AbstractExpression> exprToRemove) {
        // Drop the redundant post-filter expressions already guaranteed by the index predicate.
        path.otherExprs.removeAll(exprToRemove);
        // Keep the eliminated expressions for cost estimating purpose
        path.eliminatedPostExprs.addAll(exprToRemove);
    }
}
public class DirPluginScanner { /** * Return a single file that should be used among all te files matching a single provider identity , or null if * the conflict cannot be resolved . */ File resolveProviderConflict ( final Collection < FileCache . MemoFile > matched ) { } }
final HashMap < File , VersionCompare > versions = new HashMap < File , VersionCompare > ( ) ; final ArrayList < File > toCompare = new ArrayList < File > ( ) ; for ( final FileCache . MemoFile file : matched ) { final String vers = getVersionForFile ( file . getFile ( ) ) ; if ( null != vers ) { versions . put ( file . getFile ( ) , VersionCompare . forString ( vers ) ) ; toCompare . add ( file . getFile ( ) ) ; } } // currently resolve via filename final Comparator < File > c = new VersionCompare . fileComparator ( versions ) ; final List < File > sorted = new ArrayList < File > ( toCompare ) ; Collections . sort ( sorted , c ) ; if ( sorted . size ( ) > 0 ) { return sorted . get ( sorted . size ( ) - 1 ) ; } return null ;
public class ChromosomeMappingTools {

    /**
     * Get the CDS position mapped on the chromosome position, walking the exons of a
     * reverse-strand transcript from the last exon backwards.
     *
     * @param cdsPos the CDS-relative position to map
     * @param exonStarts exon start coordinates
     * @param exonEnds exon end coordinates (parallel to exonStarts)
     * @param cdsStart coding-region start (swapped with cdsEnd if given out of order)
     * @param cdsEnd coding-region end
     * @return the mapped chromosome position with phase, or {@code ChromPos(-1, -1)}
     *         if the position could not be mapped
     */
    public static ChromPos getChromPosReverse(int cdsPos, List<Integer> exonStarts,
            List<Integer> exonEnds, int cdsStart, int cdsEnd) {
        boolean inCoding = false;
        // Running count of coding nucleotides consumed so far while walking exons.
        int codingLength = 0;
        // Normalize so cdsStart <= cdsEnd.
        if (cdsEnd < cdsStart) {
            int tmp = cdsEnd;
            cdsEnd = cdsStart;
            cdsStart = tmp;
        }
        int lengthExons = 0;
        // map reverse: iterate exons from last to first (reverse strand).
        for (int i = exonStarts.size() - 1; i >= 0; i--) {
            logger.debug("Exon #" + (i + 1) + "/" + exonStarts.size());
            // NOTE: start/end are intentionally read swapped, then normalized below.
            int end = exonStarts.get(i);
            int start = exonEnds.get(i);
            if (end < start) {
                int tmp = end;
                end = start;
                start = tmp;
            }
            lengthExons += end - start;
            logger.debug(" is " + cdsPos + " part of Reverse exon? " + format(start + 1) + " - " + format(end) + " | " + (end - start + 1));
            logger.debug(" CDS start: " + format(cdsStart + 1) + "-" + format(cdsEnd) + " coding length counter:" + codingLength);
            if (start + 1 <= cdsEnd && end >= cdsEnd) {
                // FIRST EXON (in transcription order on the reverse strand): contains cdsEnd.
                inCoding = true;
                // Clip the exon start to the coding region when the CDS begins inside it.
                int tmpstart = start;
                if (start < cdsStart) {
                    tmpstart = cdsStart;
                }
                // here one of the few places where we don't say start + 1
                int check = codingLength + cdsEnd - tmpstart;
                logger.debug("First Exon | " + (check) + " | " + format(start + 1) + " " + format(end) + " | " + (cdsEnd - tmpstart) + " | " + cdsPos);
                if ((check > cdsPos)) {
                    // The target position falls inside this exon's coding span.
                    int tmp = cdsPos - codingLength;
                    logger.debug(" -> found position in UTR exon: " + format(cdsPos) + " " + format(tmpstart + 1) + " tmp:" + format(tmp) + " cs:" + format(cdsStart + 1) + " ce:" + format(cdsEnd) + " cl:" + codingLength);
                    return new ChromPos((cdsEnd - tmp), -1);
                }
                // don't add 1 here
                codingLength += (cdsEnd - tmpstart);
                boolean debug = logger.isDebugEnabled();
                if (debug) {
                    StringBuffer b = new StringBuffer();
                    b.append(" UTR :" + format(cdsEnd + 1) + " - " + format(end) + newline);
                    if (tmpstart == start)
                        b.append(" -> ");
                    else
                        b.append(" <-> ");
                    b.append("Exon :" + format(tmpstart + 1) + " - " + (cdsEnd) + " | " + format(cdsEnd - tmpstart + 1) + " - " + codingLength + " | " + (codingLength % 3) + newline);
                    // single exon with UTR on both ends
                    if (tmpstart != start)
                        b.append(" UTR :" + format(cdsStart) + " - " + format(start + 1) + newline);
                    logger.debug(b.toString());
                }
            } else if (start <= cdsStart && end >= cdsStart) {
                // LAST EXON (in transcription order): contains cdsStart; coding ends here.
                inCoding = false;
                if (codingLength + end - cdsStart >= cdsPos) {
                    // how many remaining coding nucleotides?
                    int tmp = codingLength + end - cdsStart - cdsPos;
                    logger.debug("cdl: " + codingLength + " tmp:" + tmp + " cdsStart: " + format(cdsStart));
                    logger.debug(" -> XXX found position noncoding exon: cdsPos:" + cdsPos + " s:" + format(start + 1) + " tmp:" + format(tmp) + " cdsStart:" + (cdsStart + 1) + " codingLength:" + codingLength + " cdsEnd:" + format(cdsEnd));
                    return new ChromPos((cdsStart + tmp), -1);
                }
                codingLength += (end - cdsStart);
                logger.debug(" <- Exon : " + format(cdsStart + 1) + " - " + format(end) + " | " + format(end - cdsStart + 1) + " | " + codingLength + " | " + (codingLength % 3));
                logger.debug(" UTR : " + format(start + 1) + " - " + format(cdsStart));
            } else if (inCoding) {
                // Fully coding internal exon.
                if (codingLength + end - start - 1 >= cdsPos) {
                    int tmp = cdsPos - codingLength;
                    // Clamp to the exon length to stay within this exon's bounds.
                    if (tmp > (end - start)) {
                        tmp = (end - start);
                        logger.debug("changing tmp to " + tmp);
                    }
                    logger.debug(" " + cdsPos + " " + codingLength + " | " + (cdsPos - codingLength) + " | " + (end - start) + " | " + tmp);
                    logger.debug(" Exon : " + format(start + 1) + " - " + format(end) + " | " + format(end - start) + " | " + codingLength + " | " + (codingLength % 3));
                    logger.debug(" -> RRR found position coding exon: " + cdsPos + " " + format(start + 1) + " " + format(end) + " " + tmp + " " + format(cdsStart + 1) + " " + codingLength);
                    return new ChromPos((end - tmp), cdsPos % 3);
                }
                // full exon is coding
                codingLength += (end - start);
                logger.debug(" Exon : " + format(start + 1) + " - " + format(end) + " | " + format(end - start + 1) + " | " + codingLength + " | " + (codingLength % 3));
            } else {
                // Exon entirely outside the coding region before translation starts.
                // e.g. see UBQLN3
                logger.debug(" no translation!");
            }
            logger.debug(" coding length: " + codingLength + "(phase:" + (codingLength % 3) + ") CDS POS trying to map:" + cdsPos);
        }
        logger.debug("length exons: " + lengthExons);
        // could not map, or map over the full length??
        return new ChromPos(-1, -1);
    }
}
public class PolymerClassRewriter {

    /**
     * Create exports and externs to protect element properties and methods from renaming and
     * dead code removal.
     *
     * <p>Since Polymer templates, observers, and computed properties rely on string references
     * to element properties and methods, and because we don't yet have a way to update those
     * references reliably, we instead export or extern them.
     *
     * <p>For properties, we create a new interface called {@code Polymer<ElementName>Interface},
     * add all element properties to it, mark that the element class {@code @implements} this
     * interface, and add the interface to the Closure externs. The specific set of properties we
     * add to this interface is determined by the value of {@code polymerExportPolicy}.
     *
     * <p>For methods, when {@code polymerExportPolicy = EXPORT_ALL}, we instead append to
     * {@code Object.prototype} in the externs using {@code @export} annotations. This approach
     * is a compromise, with the following alternatives considered:
     *
     * <p>Alternative 1: Add methods to our generated {@code Polymer<ElementName>Interface} in
     * the externs. Pro: More optimal than {@code Object.prototype} when type-aware optimizations
     * are enabled. Con 1: When a class {@code @implements} an interface, and when
     * {@code report_missing_override} is enabled, any method on the class that is also in the
     * interface must have an {@code @override} annotation, which means we generate a spurious
     * warning for all methods. Con 2: An unresolved bug was encountered (b/115942961) relating
     * to a mismatch between the signatures of the class and the generated interface.
     *
     * <p>Alternative 2: Generate goog.exportProperty calls, which causes aliases on the
     * prototype from original to optimized names to be set. Pro: Compiled code can still use
     * the optimized name. Con: In practice, for Polymer applications, we see a net increase in
     * bundle size due to the high number of new
     * {@code Foo.prototype.originalName = Foo.prototype.z} expressions.
     *
     * <p>Alternative 3: Append directly to the {@code Object.prototype} externs, instead of
     * using {@code @export} annotations for the {@link GenerateExports} pass. Pro: Doesn't
     * depend on the {@code generate_exports} and {@code export_local_property_definitions}
     * flags. Con: The PolymerPass runs in the type checking phase, so modifying
     * {@code Object.prototype} here causes unwanted type checking effects, such as allowing the
     * method to be called on any object, and generating incorrect warnings when
     * {@code report_missing_override} is enabled.
     *
     * @param cls the Polymer element definition being rewritten
     * @param readOnlyProps read-only properties; also get generated {@code _set*} extern stubs
     * @param attributeReflectedProps properties reflected to attributes; may be null
     */
    private void createExportsAndExterns(final PolymerClassDefinition cls,
            List<MemberDefinition> readOnlyProps,
            List<MemberDefinition> attributeReflectedProps) {
        // Accumulates the extern statements before splicing them into the externs tree.
        Node block = IR.block();
        String interfaceName = getInterfaceName(cls);
        // Declare the empty interface function: var PolymerXInterface = function() {};
        Node fnNode = NodeUtil.emptyFunction();
        compiler.reportChangeToChangeScope(fnNode);
        Node varNode = IR.var(NodeUtil.newQName(compiler, interfaceName), fnNode);
        JSDocInfoBuilder info = new JSDocInfoBuilder(true);
        info.recordInterface();
        varNode.setJSDocInfo(info.build());
        block.addChildToBack(varNode);
        String interfaceBasePath = interfaceName + ".prototype.";
        if (polymerExportPolicy == PolymerExportPolicy.EXPORT_ALL) {
            // Properties from behaviors were added to our element definition earlier.
            appendPropertiesToBlock(cls.props, block, interfaceBasePath);
            // Methods from behaviors were not already added to our element definition, so we need to
            // export those in addition to methods defined directly on the element. Note it's possible
            // and valid for two behaviors, or a behavior and an element, to implement the same method,
            // so we de-dupe by name. We're not checking that the signatures are compatible in the way
            // that normal class inheritance would, but that's not easy to do since these aren't classes.
            // Class mixins replace Polymer behaviors and are supported directly by Closure, so new code
            // should use those instead.
            LinkedHashMap<String, MemberDefinition> uniqueMethods = new LinkedHashMap<>();
            if (cls.behaviors != null) {
                for (BehaviorDefinition behavior : cls.behaviors) {
                    for (MemberDefinition method : behavior.functionsToCopy) {
                        uniqueMethods.put(method.name.getString(), method);
                    }
                }
            }
            // Element methods override behavior methods of the same name (inserted last).
            for (MemberDefinition method : cls.methods) {
                uniqueMethods.put(method.name.getString(), method);
            }
            for (MemberDefinition method : uniqueMethods.values()) {
                addMethodToObjectExternsUsingExportAnnotation(cls, method);
            }
        } else if (polymerVersion == 1) {
            // For Polymer 1, all declared properties are non-renameable
            appendPropertiesToBlock(cls.props, block, interfaceBasePath);
        } else {
            // For Polymer 2, only read-only properties and reflectToAttribute properties are
            // non-renameable. Other properties follow the ALL_UNQUOTED renaming rules.
            List<MemberDefinition> interfaceProperties = new ArrayList<>();
            interfaceProperties.addAll(readOnlyProps);
            if (attributeReflectedProps != null) {
                interfaceProperties.addAll(attributeReflectedProps);
            }
            appendPropertiesToBlock(interfaceProperties, block, interfaceBasePath);
        }
        for (MemberDefinition prop : readOnlyProps) {
            // Add all _set* functions to avoid renaming.
            String propName = prop.name.getString();
            String setterName = "_set" + propName.substring(0, 1).toUpperCase() + propName.substring(1);
            Node setterExprNode = IR.exprResult(NodeUtil.newQName(compiler, interfaceBasePath + setterName));
            JSDocInfoBuilder setterInfo = new JSDocInfoBuilder(true);
            JSTypeExpression propType = PolymerPassStaticUtils.getTypeFromProperty(prop, compiler);
            setterInfo.recordParameter(propName, propType);
            setterExprNode.getFirstChild().setJSDocInfo(setterInfo.build());
            block.addChildToBack(setterExprNode);
        }
        block.useSourceInfoIfMissingFromForTree(polymerElementExterns);
        // Splice the generated statements into the externs script node.
        Node scopeRoot = polymerElementExterns;
        if (!scopeRoot.isScript()) {
            scopeRoot = scopeRoot.getParent();
        }
        Node stmts = block.removeChildren();
        scopeRoot.addChildrenToBack(stmts);
        compiler.reportChangeToEnclosingScope(stmts);
    }
}
public class CacheManager { /** * cancels all tasks * @ since 5.6.3 */ public void cancelAllJobs ( ) { } }
Iterator < CacheManagerTask > iterator = mPendingTasks . iterator ( ) ; while ( iterator . hasNext ( ) ) { CacheManagerTask next = iterator . next ( ) ; next . cancel ( true ) ; } mPendingTasks . clear ( ) ;
public class druidGLexer {

    /**
     * $ANTLR start "RCURLY"
     *
     * Generated lexer rule: matches a single right curly brace token.
     * NOTE(review): ANTLR-generated code — do not hand-edit the logic.
     */
    public final void mRCURLY() throws RecognitionException {
        try {
            int _type = RCURLY;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // druidG.g:576:8: ( '}' )
            // druidG.g:576:11: '}'
            {
                match('}');
            }
            // Publish the recognized token type/channel to the lexer state.
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class RecurlyClient { /** * Update an account ' s billing info * When new or updated credit card information is updated , the billing information is only saved if the credit card * is valid . If the account has a past due invoice , the outstanding balance will be collected to validate the * billing information . * If the account does not exist before the API request , the account will be created if the billing information * is valid . * Please note : this API end - point may be used to import billing information without security codes ( CVV ) . * Recurly recommends requiring CVV from your customers when collecting new or updated billing information . * @ param accountCode recurly account id * @ param billingInfo billing info object to create or update * @ return the newly created or update billing info object on success , null otherwise */ public BillingInfo createOrUpdateBillingInfo ( final String accountCode , final BillingInfo billingInfo ) { } }
return doPUT ( Account . ACCOUNT_RESOURCE + "/" + accountCode + BillingInfo . BILLING_INFO_RESOURCE , billingInfo , BillingInfo . class ) ;
public class WebshimResources { /** * check if script is available and initialized . * @ return true if it exists */ public static boolean isInitialized ( ) { } }
final boolean loaded = WebshimResources . rememberEvent != null || StringUtils . isEmpty ( WebshimResources . POLIFILLER_URL ) ; return loaded || WebshimResources . isInHeader ( ) ;
public class HttpSender { /** * private void send ( HttpMessage msg , boolean isFollowRedirect , HttpOutputStream pipe , byte [ ] * buf ) throws HttpException , IOException { HttpMethod method = null ; HttpResponseHeader * resHeader = null ; * try { method = runMethod ( msg , isFollowRedirect ) ; / / successfully executed ; resHeader = * HttpMethodHelper . getHttpResponseHeader ( method ) ; * resHeader . setHeader ( HttpHeader . TRANSFER _ ENCODING , null ) ; / / * replaceAll ( " Transfer - Encoding : chunked \ r \ n " , " " ) ; msg . setResponseHeader ( resHeader ) ; * msg . getResponseBody ( ) . setCharset ( resHeader . getCharset ( ) ) ; msg . getResponseBody ( ) . setLength ( 0 ) ; * / / process response for each listener * pipe . write ( msg . getResponseHeader ( ) ) ; pipe . flush ( ) ; * if ( msg . getResponseHeader ( ) . getContentLength ( ) > = 0 & & * msg . getResponseHeader ( ) . getContentLength ( ) < 20480 ) { / / save time expanding buffer in * HttpBody if ( msg . getResponseHeader ( ) . getContentLength ( ) > 0 ) { * msg . getResponseBody ( ) . setBody ( method . getResponseBody ( ) ) ; pipe . write ( msg . getResponseBody ( ) ) ; * pipe . flush ( ) ; * } } else { / / byte [ ] buf = new byte [ 4096 ] ; InputStream in = method . getResponseBodyAsStream ( ) ; * int len = 0 ; while ( in ! = null & & ( len = in . read ( buf ) ) > 0 ) { pipe . write ( buf , 0 , len ) ; * pipe . flush ( ) ; * msg . getResponseBody ( ) . append ( buf , len ) ; } } } finally { if ( method ! = null ) { * method . releaseConnection ( ) ; } } } */ public static void addListener ( HttpSenderListener listener ) { } }
listeners . add ( listener ) ; Collections . sort ( listeners , getListenersComparator ( ) ) ;
public class ComputeNodesImpl { /** * Reinstalls the operating system on the specified compute node . * You can reinstall the operating system on a node only if it is in an idle or running state . This API can be invoked only on pools created with the cloud service configuration property . * @ param poolId The ID of the pool that contains the compute node . * @ param nodeId The ID of the compute node that you want to restart . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws BatchErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void reimage ( String poolId , String nodeId ) { } }
reimageWithServiceResponseAsync ( poolId , nodeId ) . toBlocking ( ) . single ( ) . body ( ) ;
public class DisasterRecoveryConfigurationsInner { /** * Gets a disaster recovery configuration . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param disasterRecoveryConfigurationName The name of the disaster recovery configuration . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the DisasterRecoveryConfigurationInner object if successful . */ public DisasterRecoveryConfigurationInner get ( String resourceGroupName , String serverName , String disasterRecoveryConfigurationName ) { } }
return getWithServiceResponseAsync ( resourceGroupName , serverName , disasterRecoveryConfigurationName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class NotifdEventConsumer { protected void setAdditionalInfoToEventCallBackStruct ( EventCallBackStruct callback_struct , String device_name , String attribute , String event_name , String [ ] filters , EventChannelStruct channel_struct ) throws DevFailed { } }
String constraint_expr = buildConstraintExpr ( device_name , attribute , event_name , filters ) ; int filter_id = add_filter_for_channel ( channel_struct , constraint_expr ) ; callback_struct . filter_constraint = constraint_expr ; callback_struct . filter_id = filter_id ; callback_struct . consumer = this ;
public class CatchyCallable { /** * { @ inheritDoc } */ @ Override public V call ( ) throws Exception { } }
try { return callable . call ( ) ; } catch ( Exception e ) { if ( log ) { logger . warn ( "exception during run" , e ) ; } throw e ; }
public class CanonicalPlanner { /** * Walk the supplied constraint to extract a list of the constraints that can be AND - ed together . For example , given the * constraint tree ( ( C1 AND C2 ) AND ( C3 OR C4 ) ) , this method would result in a list of three separate criteria : [ C1 , C2 , ( C3 OR * C4 ) ] . The resulting < code > andConstraints < / code > list will contain Constraint objects that all must be true . * @ param constraint the input constraint * @ param andableConstraints the collection into which all non - { @ link And AND } constraints should be placed */ protected void separateAndConstraints ( Constraint constraint , List < Constraint > andableConstraints ) { } }
if ( constraint == null ) return ; assert andableConstraints != null ; if ( constraint instanceof And ) { And and = ( And ) constraint ; separateAndConstraints ( and . left ( ) , andableConstraints ) ; separateAndConstraints ( and . right ( ) , andableConstraints ) ; } else { andableConstraints . add ( constraint ) ; }
public class LeaderAppender {

    /**
     * Records a completed heartbeat to the given member, then completes, fails, or retains
     * pending heartbeat futures based on the recomputed quorum heartbeat time.
     *
     * @param member the member that acknowledged the heartbeat
     * @param timestamp the time at which the heartbeat was sent
     */
    private void recordHeartbeat(RaftMemberContext member, long timestamp) {
        raft.checkThread();
        // Update the member's heartbeat time. This will be used when calculating the quorum heartbeat time.
        member.setHeartbeatTime(timestamp);
        member.setResponseTime(System.currentTimeMillis());
        // Compute the quorum heartbeat time.
        long heartbeatTime = computeHeartbeatTime();
        long currentTimestamp = System.currentTimeMillis();
        // Iterate through pending timestamped heartbeat futures and complete all futures where the timestamp
        // is greater than the last timestamp a quorum of the cluster was contacted.
        // NOTE: heartbeatFutures is assumed ordered oldest-first, which is why the loop
        // can break at the first future that is still recent.
        Iterator<TimestampedFuture<Long>> iterator = heartbeatFutures.iterator();
        while (iterator.hasNext()) {
            TimestampedFuture<Long> future = iterator.next();
            // If the future is timestamped prior to the last heartbeat to a majority of the cluster, complete the future.
            if (future.timestamp < heartbeatTime) {
                future.complete(null);
                iterator.remove();
            }
            // If the future is more than an election timeout old, fail it with a protocol exception.
            else if (currentTimestamp - future.timestamp > electionTimeout) {
                future.completeExceptionally(new RaftException.ProtocolException("Failed to reach consensus"));
                iterator.remove();
            }
            // Otherwise, we've reached recent heartbeat futures. Break out of the loop.
            else {
                break;
            }
        }
        // If heartbeat futures are still pending, attempt to send heartbeats.
        if (!heartbeatFutures.isEmpty()) {
            sendHeartbeats();
        }
    }
}
public class CmsEditSiteForm { /** * Get ou name for current site . < p > * @ return Full ou name */ private String getSiteOU ( ) { } }
try { m_clonedCms . getRequestContext ( ) . setSiteRoot ( "" ) ; CmsResource resource = m_clonedCms . readResource ( m_site . getSiteRoot ( ) ) ; List < CmsRelation > relations = m_clonedCms . getRelationsForResource ( resource , CmsRelationFilter . SOURCES ) ; for ( CmsRelation relation : relations ) { if ( relation . getSourcePath ( ) . startsWith ( "/system/orgunits/" ) ) { return ( relation . getSourcePath ( ) . substring ( "/system/orgunits/" . length ( ) ) ) ; } } } catch ( CmsException e ) { LOG . error ( "Error on reading OUs" , e ) ; } return "/" ;
public class ManagedClustersInner { /** * Gets access profile of a managed cluster . * Gets the accessProfile for the specified role name of the managed cluster with a specified resource group and name . * @ param resourceGroupName The name of the resource group . * @ param resourceName The name of the managed cluster resource . * @ param roleName The name of the role for managed cluster accessProfile resource . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ManagedClusterAccessProfileInner object if successful . */ public ManagedClusterAccessProfileInner getAccessProfiles ( String resourceGroupName , String resourceName , String roleName ) { } }
return getAccessProfilesWithServiceResponseAsync ( resourceGroupName , resourceName , roleName ) . toBlocking ( ) . single ( ) . body ( ) ;