signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LBiCharFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static < R > LBiCharFunction < R > biCharFunctionFrom ( Consumer < LBiCharFunctionBuilder < R > > buildingFunction ) { } }
LBiCharFunctionBuilder builder = new LBiCharFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class IntBitRelation { /** * Returns all right values from the relation that have the * left value specified . * @ param left left value required * @ param result where to return the result */ public void addRightWhere ( int left , IntBitSet result ) { } }
if ( line [ left ] != null ) { result . addAll ( line [ left ] ) ; }
public class StrutsUtil { /** * Return a boolean property value or the default * @ param msg MessageResources object * @ param pname String name of the property * @ param def boolean default value * @ return boolean property value or default * @ throws Throwable on error */ public static boolean getBoolProperty ( final MessageResources msg , final String pname , final boolean def ) throws Throwable { } }
String p = msg . getMessage ( pname ) ; if ( p == null ) { return def ; } return Boolean . valueOf ( p ) ;
public class CollectionFactory { /** * Sort a copy of the input collection ; if the ordering is unstable and an * error is thrown ( due to the use of TimSort in JDK 1.7 and newer ) , catch * it and leave the collection unsorted . NOTE : use this method if ordering * is desirable but not necessary . * @ param toReturn * collection to sort * @ param < T > * list type * @ return sorted copy of the input , if no errors are raised . Copy of the * original otherwise . */ @ Nonnull public static < T extends Comparable < T > > List < T > sortOptionallyComparables ( @ Nonnull Collection < T > toReturn ) { } }
List < T > list = new ArrayList < > ( toReturn ) ; try { Collections . sort ( list ) ; } catch ( IllegalArgumentException e ) { // catch possible sorting misbehaviour if ( ! e . getMessage ( ) . contains ( "Comparison method violates its general contract!" ) ) { throw e ; } // otherwise print a warning and leave the list unsorted } return list ;
public class GroupDeviceElement { /** * Ping the underlying device , return true if alive , false otherwise */ @ Override boolean ping_i ( final boolean fwd ) { } }
if ( proxy == null ) { return false ; } try { proxy . ping ( ) ; } catch ( final DevFailed df ) { return false ; } return true ;
public class SqlDocument { /** * Override to apply syntax highlighting after the document has been updated */ public void insertString ( int offset , String str , AttributeSet a ) throws BadLocationException { } }
if ( str . equals ( "{" ) ) str = addMatchingBrace ( offset ) ; super . insertString ( offset , str , a ) ; processChangedLines ( offset , str . length ( ) ) ;
public class Roles {
    /**
     * Creates a new role, or a new role/user relationship.
     * POST to /{roleName} creates the role; POST to /{roleName}/users/{cuid}
     * adds user {cuid} to the existing role.
     */
    @Override
    @Path("/{roleName}/users/{cuid}")
    @ApiOperation(value = "Create a role or add a user to an existing role",
            notes = "If users/{cuid} is present, user is added to role.", response = StatusMessage.class)
    @ApiImplicitParams({
            @ApiImplicitParam(name = "Workgroup", paramType = "body", dataType = "com.centurylink.mdw.model.user.Role")})
    public JSONObject post(String path, JSONObject content, Map<String, String> headers)
            throws ServiceException, JSONException {
        String name = getSegment(path, 1); // {roleName}
        String rel = getSegment(path, 2);  // optional relationship segment ("users")
        UserServices userServices = ServiceLocator.getUserServices();
        try {
            Role existing = userServices.getRoles().get(name);
            if (rel == null) {
                // Plain role creation; duplicates are rejected with 409.
                if (existing != null)
                    throw new ServiceException(HTTP_409_CONFLICT, "Role name already exists: " + name);
                Role role = new Role(content);
                userServices.createRole(role);
            }
            else if (rel.equals("users")) {
                // Add user {cuid} (segment 3) to the role.
                String cuid = getSegment(path, 3);
                User user = UserGroupCache.getUser(cuid);
                if (user == null) {
                    throw new CachingException("Cannot find user: " + cuid);
                }
                if (user.hasRole(name)) // in case added elsewhere
                    throw new ServiceException(HTTP_409_CONFLICT, "User " + cuid + " already has role " + name);
                userServices.addUserToRole(cuid, name);
            }
            else {
                String msg = "Unsupported relationship for role " + name + ": " + rel;
                throw new ServiceException(HTTP_400_BAD_REQUEST, msg);
            }
            return null;
        }
        catch (DataAccessException ex) {
            throw new ServiceException(HTTP_500_INTERNAL_ERROR, ex.getMessage(), ex);
        }
        catch (CachingException ex) {
            throw new ServiceException(HTTP_500_INTERNAL_ERROR, ex.getMessage(), ex);
        }
    }
}
public class RepositoryResourceImpl { /** * { @ inheritDoc } */ @ Override public String getProviderUrl ( ) { } }
return _asset . getProvider ( ) == null ? null : _asset . getProvider ( ) . getUrl ( ) ;
public class PagedList {
    /**
     * Dispatches any deferred boundary callbacks once the accessed-index window
     * gets within {@code prefetchDistance} of either end of the list.
     * Call this when mLowest/HighestIndexAccessed are changed, or when
     * mBoundaryCallbackBegin/EndDeferred is set.
     *
     * @param post when true, dispatch on the main-thread executor; when false,
     *             dispatch synchronously on the calling thread
     */
    private void tryDispatchBoundaryCallbacks(boolean post) {
        // Begin fires when the lowest accessed index is within the prefetch
        // window of position 0; end fires symmetrically near size() - 1.
        final boolean dispatchBegin = mBoundaryCallbackBeginDeferred
                && mLowestIndexAccessed <= mConfig.prefetchDistance;
        final boolean dispatchEnd = mBoundaryCallbackEndDeferred
                && mHighestIndexAccessed >= size() - 1 - mConfig.prefetchDistance;
        if (!dispatchBegin && !dispatchEnd) {
            return;
        }
        // Clear the deferred flags before dispatching so each boundary fires
        // at most once per deferral.
        if (dispatchBegin) {
            mBoundaryCallbackBeginDeferred = false;
        }
        if (dispatchEnd) {
            mBoundaryCallbackEndDeferred = false;
        }
        if (post) {
            mMainThreadExecutor.execute(new Runnable() {
                @Override
                public void run() {
                    dispatchBoundaryCallbacks(dispatchBegin, dispatchEnd);
                }
            });
        } else {
            dispatchBoundaryCallbacks(dispatchBegin, dispatchEnd);
        }
    }
}
public class StudyEntry { public String getFormatAsString ( ) { } }
return impl . getFormat ( ) == null ? null : String . join ( ":" , impl . getFormat ( ) ) ;
public class A_CmsSerialDateValue { /** * Set the end type as derived from other values . */ protected final void setDerivedEndType ( ) { } }
m_endType = getPatternType ( ) . equals ( PatternType . NONE ) || getPatternType ( ) . equals ( PatternType . INDIVIDUAL ) ? EndType . SINGLE : null != getSeriesEndDate ( ) ? EndType . DATE : EndType . TIMES ;
public class RoboconfMessageQueue { /** * So , we only override offer ( ) . */ @ Override public boolean offer ( Message e ) { } }
boolean result = super . offer ( e ) ; if ( result ) { this . receivedMessagesCount . incrementAndGet ( ) ; this . timestampOfLastReceivedMessage . set ( new Date ( ) . getTime ( ) ) ; } return result ;
public class GBOXImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    // NOTE: EMF-generated accessor; regenerate from the model rather than
    // hand-editing. A feature "is set" when its current value differs from
    // its EDEFAULT constant (null-safe comparison on both sides).
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.GBOX__RES:
                return RES_EDEFAULT == null ? res != null : !RES_EDEFAULT.equals(res);
            case AfplibPackage.GBOX__XPOS0:
                return XPOS0_EDEFAULT == null ? xpos0 != null : !XPOS0_EDEFAULT.equals(xpos0);
            case AfplibPackage.GBOX__YPOS0:
                return YPOS0_EDEFAULT == null ? ypos0 != null : !YPOS0_EDEFAULT.equals(ypos0);
            case AfplibPackage.GBOX__XPOS1:
                return XPOS1_EDEFAULT == null ? xpos1 != null : !XPOS1_EDEFAULT.equals(xpos1);
            case AfplibPackage.GBOX__YPOS1:
                return YPOS1_EDEFAULT == null ? ypos1 != null : !YPOS1_EDEFAULT.equals(ypos1);
            case AfplibPackage.GBOX__HAXIS:
                return HAXIS_EDEFAULT == null ? haxis != null : !HAXIS_EDEFAULT.equals(haxis);
            case AfplibPackage.GBOX__VAXIS:
                return VAXIS_EDEFAULT == null ? vaxis != null : !VAXIS_EDEFAULT.equals(vaxis);
        }
        // Unknown features are delegated to the superclass.
        return super.eIsSet(featureID);
    }
}
public class Record {
    /**
     * Creates and initializes the record with this class name.
     *
     * @param className       full class name; a leading "." marks a relative
     *                        name to be resolved against the owner's package
     * @param recordOwner     the record owner to add this record to
     * @param bInitRecord     whether to call init() on the created record
     * @param bErrorIfNotFound display an error if not found
     * @return the new record, or null when it could not be created
     */
    public static Record makeRecordFromClassName(String className, RecordOwner recordOwner, boolean bInitRecord, boolean bErrorIfNotFound) {
        Record record = null;
        try {
            record = (Record) ClassServiceUtility.getClassService().makeObjectFromClassName(className, null, bErrorIfNotFound);
        } catch (RuntimeException ex) {
            // First attempt failed: if the name is relative (starts with "."),
            // retry once with the owner's domain package prepended.
            if (className.startsWith("."))
                if (recordOwner != null)
                    if (!recordOwner.getClass().getName().startsWith(BundleConstants.ROOT_PACKAGE)) {
                        String packageName = Utility.getPackageName(recordOwner.getClass().getName());
                        // Take the first two package segments ("domain.vendor")
                        // as the prefix for the retry.
                        int domainEnd = packageName.indexOf(".");
                        if (domainEnd != -1)
                            domainEnd = packageName.indexOf(".", domainEnd + 1);
                        if (domainEnd != -1) {
                            className = ClassServiceUtility.getFullClassName(packageName.substring(0, domainEnd), null, className);
                            ex = null;
                            try {
                                record = (Record) ClassServiceUtility.getClassService().makeObjectFromClassName(className, null, bErrorIfNotFound);
                            } catch (RuntimeException e) {
                                ex = e;
                            }
                        }
                        // Both attempts failed: surface the error on the task.
                        if (ex != null)
                            recordOwner.getTask().setLastError(ex.getMessage());
                    }
        }
        if (bInitRecord)
            if (record != null)
                record.init(recordOwner);
        return record;
    }
}
public class DefaultTokenManager { /** * Set the client config that is been used on the s3client * @ param clientConfiguration */ public void setClientConfiguration ( ClientConfiguration clientConfiguration ) { } }
this . clientConfiguration = clientConfiguration ; if ( clientConfiguration != null ) { this . httpClientSettings = HttpClientSettings . adapt ( clientConfiguration ) ; if ( getProvider ( ) instanceof DefaultTokenProvider ) { DefaultTokenProvider defaultProvider = ( DefaultTokenProvider ) getProvider ( ) ; defaultProvider . setHttpClientSettings ( httpClientSettings ) ; } if ( getProvider ( ) instanceof DelegateTokenProvider ) { DelegateTokenProvider delegateProvider = ( DelegateTokenProvider ) getProvider ( ) ; delegateProvider . setHttpClientSettings ( httpClientSettings ) ; } }
public class MultiMEProxyHandler {
    /**
     * Forwards subscribe events onto the neighbouring MEs. Called (from
     * NeighbourProxyListener) when a proxy subscription request arrives from a
     * neighbouring ME and needs forwarding to MEs outside the originating bus.
     *
     * @param topicSpaces the list of topic spaces to be forwarded
     * @param topics      the list of topics to send to the neighbours
     * @param busId       the bus that this message originated from
     * @param transaction the transaction under which forwarding occurs
     * @exception SIResourceException thrown if there is an error sending a
     *            message to a neighbour
     */
    protected void subscribeEvent(List topicSpaces, List topics, String busId, Transaction transaction) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "subscribeEvent", new Object[]{topics, topicSpaces, busId, transaction});
        try {
            // Take the lock-manager lock: multiple subscribes may run at the
            // same time - this is allowed.
            _lockManager.lock();
            if (!_started) {
                // Handler already stopped; nothing to forward.
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                    SibTr.exit(tc, "subscribeEvent", "Returning as stopped");
                return;
            }
            if (!_reconciling)
                // Forward the subscribe to the remote neighbours.
                remoteSubscribeEvent(topicSpaces, topics, busId, transaction, true);
        } finally {
            _lockManager.unlock();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "subscribeEvent");
    }
}
public class CompilationUnitMerger { /** * Util method to make source merge more convenient * @ param first merge params , specifically for the existing source * @ param second merge params , specifically for the new source * @ return merged result * @ throws ParseException cannot parse the input params */ public static String merge ( String first , String second ) throws ParseException { } }
JavaParser . setDoNotAssignCommentsPreceedingEmptyLines ( false ) ; CompilationUnit cu1 = JavaParser . parse ( new StringReader ( first ) , true ) ; CompilationUnit cu2 = JavaParser . parse ( new StringReader ( second ) , true ) ; AbstractMerger < CompilationUnit > merger = AbstractMerger . getMerger ( CompilationUnit . class ) ; CompilationUnit result = merger . merge ( cu1 , cu2 ) ; return result . toString ( ) ;
public class DescribeOrderableDBInstanceOptionsResult {
    /**
     * An <a>OrderableDBInstanceOption</a> structure containing information about orderable options for the DB instance.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setOrderableDBInstanceOptions(java.util.Collection)} or
     * {@link #withOrderableDBInstanceOptions(java.util.Collection)} if you want to override the existing values.
     *
     * @param orderableDBInstanceOptions
     *        An <a>OrderableDBInstanceOption</a> structure containing information about orderable options for the DB
     *        instance.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeOrderableDBInstanceOptionsResult withOrderableDBInstanceOptions(OrderableDBInstanceOption... orderableDBInstanceOptions) {
        if (this.orderableDBInstanceOptions == null) {
            // Lazily create the backing list, presized to the varargs length.
            setOrderableDBInstanceOptions(new java.util.ArrayList<OrderableDBInstanceOption>(orderableDBInstanceOptions.length));
        }
        for (OrderableDBInstanceOption ele : orderableDBInstanceOptions) {
            this.orderableDBInstanceOptions.add(ele);
        }
        return this;
    }
}
public class SAML2LogoutValidator {
    /**
     * Validates the SAML logout response: success status, optional signature,
     * issue instant, optional issuer, and the destination endpoint.
     *
     * @param logoutResponse the logout response
     * @param context the context
     * @param engine the signature engine
     */
    protected void validateLogoutResponse(final LogoutResponse logoutResponse, final SAML2MessageContext context, final SignatureTrustEngine engine) {
        validateSuccess(logoutResponse.getStatus());
        validateSignatureIfItExists(logoutResponse.getSignature(), context, engine);
        validateIssueInstant(logoutResponse.getIssueInstant());
        validateIssuerIfItExists(logoutResponse.getIssuer(), context);
        // NOTE(review): assumes the SP metadata declares at least one single
        // logout service -- get(0) throws IndexOutOfBoundsException otherwise.
        // Confirm upstream metadata validation guarantees this.
        verifyEndpoint(context.getSPSSODescriptor().getSingleLogoutServices().get(0), logoutResponse.getDestination());
    }
}
public class AtomContactSet { /** * Returns the list of contacts from this set that are within the given distance . * @ param distance * @ return * @ throws IllegalArgumentException if given distance is larger than distance cutoff * used for calculation of contacts */ public List < AtomContact > getContactsWithinDistance ( double distance ) { } }
if ( distance >= cutoff ) throw new IllegalArgumentException ( "Given distance " + String . format ( "%.2f" , distance ) + " is larger than contacts' distance cutoff " + String . format ( "%.2f" , cutoff ) ) ; List < AtomContact > list = new ArrayList < AtomContact > ( ) ; for ( AtomContact contact : this . contacts . values ( ) ) { if ( contact . getDistance ( ) < distance ) { list . add ( contact ) ; } } return list ;
public class SessionUtil { /** * Query IDP token url to authenticate and retrieve access token * @ param loginInput * @ param tokenUrl * @ return * @ throws SnowflakeSQLException */ private static String federatedFlowStep3 ( LoginInput loginInput , String tokenUrl ) throws SnowflakeSQLException { } }
String oneTimeToken = "" ; try { URL url = new URL ( tokenUrl ) ; URI tokenUri = url . toURI ( ) ; final HttpPost postRequest = new HttpPost ( tokenUri ) ; StringEntity params = new StringEntity ( "{\"username\":\"" + loginInput . getUserName ( ) + "\",\"password\":\"" + loginInput . getPassword ( ) + "\"}" ) ; postRequest . setEntity ( params ) ; HeaderGroup headers = new HeaderGroup ( ) ; headers . addHeader ( new BasicHeader ( HttpHeaders . ACCEPT , "application/json" ) ) ; headers . addHeader ( new BasicHeader ( HttpHeaders . CONTENT_TYPE , "application/json" ) ) ; postRequest . setHeaders ( headers . getAllHeaders ( ) ) ; final String idpResponse = HttpUtil . executeRequestWithoutCookies ( postRequest , loginInput . getLoginTimeout ( ) , 0 , null ) ; logger . debug ( "user is authenticated against {}." , loginInput . getAuthenticator ( ) ) ; // session token is in the data field of the returned json response final JsonNode jsonNode = mapper . readTree ( idpResponse ) ; oneTimeToken = jsonNode . get ( "cookieToken" ) . asText ( ) ; } catch ( IOException | URISyntaxException ex ) { handleFederatedFlowError ( loginInput , ex ) ; } return oneTimeToken ;
public class RecommendationsInner {
    /**
     * Resets all recommendation opt-out settings for a subscription.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> resetAllFiltersAsync(final ServiceCallback<Void> serviceCallback) {
        // Delegate to the response-aware variant and adapt it to the callback.
        return ServiceFuture.fromResponse(resetAllFiltersWithServiceResponseAsync(), serviceCallback);
    }
}
public class MPD9AbstractReader {
    /**
     * Processes one task row from the MPD database: creates a Task in the
     * project, copies every mapped TASK_* column onto it, derives values the
     * database does not store directly (cost variance, null-task detection),
     * and fires a task-read event.
     *
     * Unmapped optional fields (Cost1-10, Date1-10, Duration1-10, Finish1-10,
     * Flag1-20, Number1-20, OutlineCode1-10, Start1-10, Text1-30, plus
     * contact/hyperlink/resource summary fields) were present as commented-out
     * setters in the original source and remain unimplemented. Open @todo
     * items from the original: BCWP/BCWS (TASK_BCWP / TASK_BCWS), Critical
     * (TASK_IS_CRITICAL), LinkedFields (TASK_HAS_LINKED_FIELDS), TotalSlack
     * (TASK_TOTAL_SLACK), and Start/FinishVariance (possibly better
     * calculated than read).
     *
     * @param row task data
     */
    protected void processTask(Row row) {
        Integer uniqueID = row.getInteger("TASK_UID");
        // Negative or missing UIDs are internal placeholder rows; skip them.
        if (uniqueID != null && uniqueID.intValue() >= 0) {
            Task task = m_project.addTask();
            // Duration columns are stored as raw ints; this format drives the
            // unit conversion for all duration fields below.
            TimeUnit durationFormat = MPDUtility.getDurationTimeUnits(row.getInt("TASK_DUR_FMT"));
            task.setActualCost(row.getCurrency("TASK_ACT_COST"));
            task.setActualDuration(MPDUtility.getAdjustedDuration(m_project, row.getInt("TASK_ACT_DUR"), durationFormat));
            task.setActualFinish(row.getDate("TASK_ACT_FINISH"));
            task.setActualOvertimeCost(row.getCurrency("TASK_ACT_OVT_COST"));
            task.setActualOvertimeWork(row.getDuration("TASK_ACT_OVT_WORK"));
            task.setActualStart(row.getDate("TASK_ACT_START"));
            task.setActualWork(row.getDuration("TASK_ACT_WORK"));
            task.setACWP(row.getCurrency("TASK_ACWP"));
            task.setBaselineCost(row.getCurrency("TASK_BASE_COST"));
            task.setBaselineDuration(MPDUtility.getAdjustedDuration(m_project, row.getInt("TASK_BASE_DUR"), durationFormat));
            task.setBaselineFinish(row.getDate("TASK_BASE_FINISH"));
            task.setBaselineStart(row.getDate("TASK_BASE_START"));
            task.setBaselineWork(row.getDuration("TASK_BASE_WORK"));
            // task.setBCWP(row.getCurrency("TASK_BCWP")); // @todo FIXME
            // task.setBCWS(row.getCurrency("TASK_BCWS")); // @todo FIXME
            task.setCalendar(m_project.getCalendarByUniqueID(row.getInteger("TASK_CAL_UID")));
            task.setConstraintDate(row.getDate("TASK_CONSTRAINT_DATE"));
            task.setConstraintType(ConstraintType.getInstance(row.getInt("TASK_CONSTRAINT_TYPE")));
            task.setCost(row.getCurrency("TASK_COST"));
            task.setCreateDate(row.getDate("TASK_CREATION_DATE"));
            // task.setCritical(row.getBoolean("TASK_IS_CRITICAL")); // @todo FIXME
            task.setDeadline(row.getDate("TASK_DEADLINE"));
            task.setDuration(MPDUtility.getAdjustedDuration(m_project, row.getInt("TASK_DUR"), durationFormat));
            task.setDurationVariance(MPDUtility.getAdjustedDuration(m_project, row.getInt("TASK_DUR_VAR"), durationFormat));
            task.setEarlyFinish(row.getDate("TASK_EARLY_FINISH"));
            task.setEarlyStart(row.getDate("TASK_EARLY_START"));
            task.setEffortDriven(row.getBoolean("TASK_IS_EFFORT_DRIVEN"));
            task.setEstimated(row.getBoolean("TASK_DUR_IS_EST"));
            // Stored as "collapsed", exposed as "expanded" - hence the negation.
            task.setExpanded(!row.getBoolean("TASK_IS_COLLAPSED"));
            task.setExternalTask(row.getBoolean("TASK_IS_EXTERNAL"));
            task.setFinish(row.getDate("TASK_FINISH_DATE"));
            // task.setFinishVariance(...); // possibly better calculated for consistent results
            task.setFixedCost(row.getCurrency("TASK_FIXED_COST"));
            task.setFixedCostAccrual(AccrueType.getInstance(row.getInt("TASK_FIXED_COST_ACCRUAL")));
            task.setFreeSlack(row.getDuration("TASK_FREE_SLACK").convertUnits(durationFormat, m_project.getProjectProperties()));
            task.setHideBar(row.getBoolean("TASK_BAR_IS_HIDDEN"));
            task.setID(row.getInteger("TASK_ID"));
            task.setIgnoreResourceCalendar(row.getBoolean("TASK_IGNORES_RES_CAL"));
            task.setLateFinish(row.getDate("TASK_LATE_FINISH"));
            task.setLateStart(row.getDate("TASK_LATE_START"));
            task.setLevelAssignments(row.getBoolean("TASK_LEVELING_ADJUSTS_ASSN"));
            task.setLevelingCanSplit(row.getBoolean("TASK_LEVELING_CAN_SPLIT"));
            // Leveling delay uses its own format column, not durationFormat.
            task.setLevelingDelayFormat(MPDUtility.getDurationTimeUnits(row.getInt("TASK_LEVELING_DELAY_FMT")));
            task.setLevelingDelay(MPDUtility.getAdjustedDuration(m_project, row.getInt("TASK_LEVELING_DELAY"), task.getLevelingDelayFormat()));
            // task.setLinkedFields(row.getBoolean("TASK_HAS_LINKED_FIELDS")); // @todo FIXME
            task.setMarked(row.getBoolean("TASK_IS_MARKED"));
            task.setMilestone(row.getBoolean("TASK_IS_MILESTONE"));
            task.setName(row.getString("TASK_NAME"));
            task.setObjects(getNullOnValue(row.getInteger("TASK_NUM_OBJECTS"), 0));
            task.setOutlineLevel(row.getInteger("TASK_OUTLINE_LEVEL"));
            task.setOutlineNumber(row.getString("TASK_OUTLINE_NUM"));
            task.setOverAllocated(row.getBoolean("TASK_IS_OVERALLOCATED"));
            task.setOvertimeCost(row.getCurrency("TASK_OVT_COST"));
            task.setPercentageComplete(row.getDouble("TASK_PCT_COMP"));
            task.setPercentageWorkComplete(row.getDouble("TASK_PCT_WORK_COMP"));
            task.setPreleveledFinish(row.getDate("TASK_PRELEVELED_FINISH"));
            task.setPreleveledStart(row.getDate("TASK_PRELEVELED_START"));
            task.setPriority(Priority.getInstance(row.getInt("TASK_PRIORITY")));
            task.setRecurring(row.getBoolean("TASK_IS_RECURRING"));
            task.setRegularWork(row.getDuration("TASK_REG_WORK"));
            task.setRemainingCost(row.getCurrency("TASK_REM_COST"));
            task.setRemainingDuration(MPDUtility.getAdjustedDuration(m_project, row.getInt("TASK_REM_DUR"), durationFormat));
            task.setRemainingOvertimeCost(row.getCurrency("TASK_REM_OVT_COST"));
            task.setRemainingOvertimeWork(row.getDuration("TASK_REM_OVT_WORK"));
            task.setRemainingWork(row.getDuration("TASK_REM_WORK"));
            task.setResume(row.getDate("TASK_RESUME_DATE"));
            task.setRollup(row.getBoolean("TASK_IS_ROLLED_UP"));
            task.setStart(row.getDate("TASK_START_DATE"));
            // task.setStartVariance(...); // possibly more accurate by calculation
            task.setStop(row.getDate("TASK_STOP_DATE"));
            task.setSummary(row.getBoolean("TASK_IS_SUMMARY"));
            // task.setTotalSlack(row.getDuration("TASK_TOTAL_SLACK")); // @todo FIXME
            task.setType(TaskType.getInstance(row.getInt("TASK_TYPE")));
            task.setUniqueID(uniqueID);
            task.setWBS(row.getString("TASK_WBS"));
            task.setWork(row.getDuration("TASK_WORK"));
            // Notes are stored as RTF; strip the markup unless asked to keep it.
            String notes = row.getString("TASK_RTF_NOTES");
            if (notes != null) {
                if (m_preserveNoteFormatting == false) {
                    notes = RtfHelper.strip(notes);
                }
                task.setNotes(notes);
            }
            // Calculate the cost variance (not stored in the database).
            if (task.getCost() != null && task.getBaselineCost() != null) {
                task.setCostVariance(NumberHelper.getDouble(task.getCost().doubleValue() - task.getBaselineCost().doubleValue()));
            }
            // Set default flag values.
            task.setFlag(1, false);
            task.setFlag(2, false);
            task.setFlag(3, false);
            task.setFlag(4, false);
            task.setFlag(5, false);
            task.setFlag(6, false);
            task.setFlag(7, false);
            task.setFlag(8, false);
            task.setFlag(9, false);
            task.setFlag(10, false);
            // If we have a WBS value from the MPD file, don't autogenerate.
            if (task.getWBS() != null) {
                m_autoWBS = false;
            }
            // Attempt to identify null tasks.
            if (task.getName() == null && task.getStart() == null && task.getFinish() == null) {
                task.setNull(true);
            }
            m_eventManager.fireTaskReadEvent(task);
        }
    }
}
public class OrganizationResourceImpl { /** * Creates a new client version . * @ param bean * @ param client * @ throws StorageException */ protected ClientVersionBean createClientVersionInternal ( NewClientVersionBean bean , ClientBean client ) throws StorageException { } }
if ( ! BeanUtils . isValidVersion ( bean . getVersion ( ) ) ) { throw new StorageException ( "Invalid/illegal client version: " + bean . getVersion ( ) ) ; // $ NON - NLS - 1 $ } ClientVersionBean newVersion = new ClientVersionBean ( ) ; newVersion . setClient ( client ) ; newVersion . setCreatedBy ( securityContext . getCurrentUser ( ) ) ; newVersion . setCreatedOn ( new Date ( ) ) ; newVersion . setModifiedBy ( securityContext . getCurrentUser ( ) ) ; newVersion . setModifiedOn ( new Date ( ) ) ; newVersion . setStatus ( ClientStatus . Created ) ; newVersion . setVersion ( bean . getVersion ( ) ) ; newVersion . setApikey ( bean . getApiKey ( ) ) ; if ( newVersion . getApikey ( ) == null ) { newVersion . setApikey ( apiKeyGenerator . generate ( ) ) ; } storage . createClientVersion ( newVersion ) ; storage . createAuditEntry ( AuditUtils . clientVersionCreated ( newVersion , securityContext ) ) ; log . debug ( String . format ( "Created new client version %s: %s" , newVersion . getClient ( ) . getName ( ) , newVersion ) ) ; // $ NON - NLS - 1 $ return newVersion ;
public class ConstraintMessage {
    /**
     * Gets a method parameter (or a parameter field) name, if the violation
     * was raised in it. Inspects the last two nodes of the violation's
     * property path: the parent node identifies where the violation occurred
     * (a parameter or a method), the member node names the failing element.
     */
    private static Optional<String> getMemberName(ConstraintViolation<?> violation, Invocable invocable) {
        final List<Path.Node> propertyPath = Lists.of(violation.getPropertyPath());
        final int size = propertyPath.size();
        // A path shorter than two nodes cannot carry a parent/member pair.
        if (size < 2) {
            return Optional.empty();
        }
        final Path.Node parent = propertyPath.get(size - 2);
        final Path.Node member = propertyPath.get(size - 1);
        switch (parent.getKind()) {
            case PARAMETER:
                // Constraint violation most likely failed within a BeanParam.
                final List<Parameter> parameters = invocable.getParameters();
                final Parameter param = parameters.get(parent.as(Path.ParameterNode.class).getParameterIndex());
                // Extract the failing *Param annotation inside the BeanParam.
                if (param.getSource().equals(Parameter.Source.BEAN_PARAM)) {
                    final Field field = FieldUtils.getField(param.getRawType(), member.getName(), true);
                    return JerseyParameterNameProvider.getParameterNameFromAnnotations(field.getDeclaredAnnotations());
                }
                break;
            case METHOD:
                return Optional.of(member.getName());
            default:
                break;
        }
        return Optional.empty();
    }
}
public class TagLibraryInfo { /** * Get the FunctionInfo for a given function name , looking through all the * functions in this tag library . * @ param name The name ( no prefix ) of the function * @ return the FunctionInfo for the function with the given name , or null * if no such function exists * @ since JSP 2.0 */ public FunctionInfo getFunction ( String name ) { } }
if ( functions == null || functions . length == 0 ) { System . err . println ( "No functions" ) ; return null ; } for ( int i = 0 ; i < functions . length ; i ++ ) { if ( functions [ i ] . getName ( ) . equals ( name ) ) { return functions [ i ] ; } } return null ;
public class StandardDdlParser {
    /**
     * Utility method subclasses can override to check unknown tokens and perform additional node
     * manipulation. Example would be in the Oracle dialect for CREATE FUNCTION statements that can
     * end with a '/' character because a statement can contain multiple statements.
     *
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param tokenValue the string value of the unknown token; never null
     * @return the new node; this base implementation always returns {@code null}
     * @throws ParsingException
     */
    public AstNode handleUnknownToken(DdlTokenStream tokens, String tokenValue) throws ParsingException {
        assert tokens != null;
        assert tokenValue != null;
        // DEFAULT IMPLEMENTATION DOES NOTHING — hook for dialect-specific subclasses.
        return null;
    }
}
public class GeometryExpressions {
    /**
     * Create a new GeometryExpression.
     *
     * @param value Geometry to wrap; embedded as-is in a constant expression
     * @return new GeometryExpression over a constant holding {@code value}
     */
    public static <T extends Geometry> GeometryExpression<T> asGeometry(T value) {
        // Delegates to the Expression-based overload with a constant expression.
        return asGeometry(Expressions.constant(value));
    }
}
public class CPDAvailabilityEstimatePersistenceImpl {
    /**
     * Returns the cpd availability estimates before and after the current cpd availability
     * estimate in the ordered set where commerceAvailabilityEstimateId = &#63;.
     *
     * @param CPDAvailabilityEstimateId the primary key of the current cpd availability estimate
     * @param commerceAvailabilityEstimateId the commerce availability estimate ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the previous, current, and next cpd availability estimate
     * @throws NoSuchCPDAvailabilityEstimateException if a cpd availability estimate with the
     *         primary key could not be found
     */
    @Override
    public CPDAvailabilityEstimate[] findByCommerceAvailabilityEstimateId_PrevAndNext(
            long CPDAvailabilityEstimateId, long commerceAvailabilityEstimateId,
            OrderByComparator<CPDAvailabilityEstimate> orderByComparator)
        throws NoSuchCPDAvailabilityEstimateException {

        // Resolve the "current" entity first; this throws the NoSuch... exception if absent.
        CPDAvailabilityEstimate cpdAvailabilityEstimate = findByPrimaryKey(CPDAvailabilityEstimateId);

        Session session = null;

        try {
            session = openSession();

            // Result layout: [0] = previous, [1] = current, [2] = next.
            CPDAvailabilityEstimate[] array = new CPDAvailabilityEstimateImpl[3];

            array[0] = getByCommerceAvailabilityEstimateId_PrevAndNext(session, cpdAvailabilityEstimate,
                    commerceAvailabilityEstimateId, orderByComparator, true);

            array[1] = cpdAvailabilityEstimate;

            array[2] = getByCommerceAvailabilityEstimateId_PrevAndNext(session, cpdAvailabilityEstimate,
                    commerceAvailabilityEstimateId, orderByComparator, false);

            return array;
        }
        catch (Exception e) {
            throw processException(e);
        }
        finally {
            // Session is always closed, even when lookup of neighbours fails.
            closeSession(session);
        }
    }
}
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getMMT ( ) { } }
if ( mmtEClass == null ) { mmtEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 301 ) ; } return mmtEClass ;
public class ClasspathOrder { /** * Test to see if a RelativePath has been filtered out by the user . * @ param classpathElementPath * the classpath element path * @ return true , if not filtered out */ private boolean filter ( final String classpathElementPath ) { } }
if ( scanSpec . classpathElementFilters != null ) { for ( final ClasspathElementFilter filter : scanSpec . classpathElementFilters ) { if ( ! filter . includeClasspathElement ( classpathElementPath ) ) { return false ; } } } return true ;
public class TextWriterImageVisitor { /** * Write parameter to output file ( and possibly screen ) . * @ param toWrite Text to write to file */ protected void write ( String toWrite ) throws IOException { } }
if ( ! okToWrite ) throw new IOException ( "file not open for writing." ) ; if ( printToScreen ) System . out . print ( toWrite ) ; try { out . write ( DFSUtil . string2Bytes ( toWrite ) ) ; } catch ( IOException e ) { okToWrite = false ; throw e ; }
public class JsRuntime { /** * Returns the field containing the extension object for the given field descriptor . */ public static Expression extensionField ( FieldDescriptor desc ) { } }
String jsExtensionImport = ProtoUtils . getJsExtensionImport ( desc ) ; String jsExtensionName = ProtoUtils . getJsExtensionName ( desc ) ; return symbolWithNamespace ( jsExtensionImport , jsExtensionName ) ;
public class WordVectorSerializer {
    /**
     * Loads a word-vector model from disk into a read-only {@code StaticWord2Vec}
     * backed by RAM storage.
     *
     * Three on-disk formats are attempted, in order:
     * <ol>
     *   <li>DL4J zip format (a zip archive containing {@code syn0.txt}),</li>
     *   <li>plain CSV text,</li>
     *   <li>Google binary word2vec format.</li>
     * </ol>
     * The first reader that succeeds wins; if all three fail, a RuntimeException
     * ("Unable to guess input file format") is thrown.
     *
     * NOTE(review): periodic GC is temporarily disabled and the occasional-GC
     * frequency raised while loading; both settings are restored in the finally
     * blocks of each attempt.
     *
     * TODO: this method needs better name :)
     *
     * @param file model file to load; must exist and not be a directory
     * @return the loaded static word-vector model
     */
    public static WordVectors loadStaticModel(@NonNull File file) {
        if (!file.exists() || file.isDirectory())
            throw new RuntimeException(new FileNotFoundException("File [" + file.getAbsolutePath() + "] was not found"));

        // Remember the GC configuration so it can be restored after the bulk load.
        int originalFreq = Nd4j.getMemoryManager().getOccasionalGcFrequency();
        boolean originalPeriodic = Nd4j.getMemoryManager().isPeriodicGcActive();

        if (originalPeriodic)
            Nd4j.getMemoryManager().togglePeriodicGc(false);

        Nd4j.getMemoryManager().setOccasionalGcFrequency(50000);

        // Vectors are keyed by word index; the NoOp compressor means no actual compression here.
        CompressedRamStorage<Integer> storage = new CompressedRamStorage.Builder<Integer>()
                        .useInplaceCompression(false).setCompressor(new NoOp()).emulateIsAbsent(false).build();

        VocabCache<VocabWord> vocabCache = new AbstractCache.Builder<VocabWord>().build();

        // now we need to define which file format we have here
        // if zip - that's dl4j format
        try {
            log.debug("Trying DL4j format...");
            File tmpFileSyn0 = DL4JFileUtils.createTempFile("word2vec", "syn");
            tmpFileSyn0.deleteOnExit();

            ZipFile zipFile = new ZipFile(file);
            ZipEntry syn0 = zipFile.getEntry("syn0.txt");
            InputStream stream = zipFile.getInputStream(syn0);

            FileUtils.copyInputStreamToFile(stream, tmpFileSyn0);

            storage.clear();

            try (Reader reader = new CSVReader(tmpFileSyn0)) {
                while (reader.hasNext()) {
                    Pair<VocabWord, float[]> pair = reader.next();
                    VocabWord word = pair.getFirst();
                    storage.store(word.getIndex(), pair.getSecond());

                    vocabCache.addToken(word);
                    vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                    Nd4j.getMemoryManager().invokeGcOccasionally();
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                // Restore GC behaviour and remove the temporary copy of syn0.
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);

                try {
                    tmpFileSyn0.delete();
                } catch (Exception e) {
                }
            }
        } catch (Exception e) {
            //
            try {
                // try to load file as text csv
                vocabCache = new AbstractCache.Builder<VocabWord>().build();
                storage.clear();
                log.debug("Trying CSVReader...");
                try (Reader reader = new CSVReader(file)) {
                    while (reader.hasNext()) {
                        Pair<VocabWord, float[]> pair = reader.next();
                        VocabWord word = pair.getFirst();
                        storage.store(word.getIndex(), pair.getSecond());

                        vocabCache.addToken(word);
                        vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                        Nd4j.getMemoryManager().invokeGcOccasionally();
                    }
                } catch (Exception ef) {
                    // we throw away this exception, and trying to load data as binary model
                    throw new RuntimeException(ef);
                } finally {
                    if (originalPeriodic)
                        Nd4j.getMemoryManager().togglePeriodicGc(true);

                    Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
                }
            } catch (Exception ex) {
                // otherwise it's probably google model. which might be compressed or not
                log.debug("Trying BinaryReader...");
                vocabCache = new AbstractCache.Builder<VocabWord>().build();
                storage.clear();
                try (Reader reader = new BinaryReader(file)) {
                    while (reader.hasNext()) {
                        Pair<VocabWord, float[]> pair = reader.next();
                        VocabWord word = pair.getFirst();
                        storage.store(word.getIndex(), pair.getSecond());

                        vocabCache.addToken(word);
                        vocabCache.addWordToIndex(word.getIndex(), word.getLabel());

                        Nd4j.getMemoryManager().invokeGcOccasionally();
                    }
                } catch (Exception ez) {
                    // All three formats failed; give up.
                    throw new RuntimeException("Unable to guess input file format");
                } finally {
                    if (originalPeriodic)
                        Nd4j.getMemoryManager().togglePeriodicGc(true);

                    Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
                }
            } finally {
                if (originalPeriodic)
                    Nd4j.getMemoryManager().togglePeriodicGc(true);

                Nd4j.getMemoryManager().setOccasionalGcFrequency(originalFreq);
            }
        }

        StaticWord2Vec word2Vec = new StaticWord2Vec.Builder(storage, vocabCache).build();

        return word2Vec;
    }
}
public class SibRaDispatcher {
    /**
     * This method is used as a workaround to increase the retry count on a message.
     * The better solution is to allow us to unlock a message passing in a parameter
     * which states if we wish to increase the retry count or not.
     *
     * @param msgHandle The handle of the message to increase the retry count on
     */
    protected void increaseRetryCount(final SIMessageHandle msgHandle) {
        final String methodName = "increaseRetryCount";
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, methodName);
        }
        try {
            // Delete the message under a local transaction, then roll the transaction
            // back: the delete never commits, but the rollback bumps the message's
            // retry count — which is the whole point of this workaround.
            SIUncoordinatedTransaction localTran = _connection.createUncoordinatedTransaction();
            deleteMessages(new SIMessageHandle[] { msgHandle }, localTran);
            localTran.rollback();
        } catch (Exception exception) {
            // Best-effort: failures are recorded via FFDC and trace but not rethrown.
            FFDCFilter.processException(exception, CLASS_NAME + "." + methodName, "1:1547:1.73", this);
            if (TraceComponent.isAnyTracingEnabled() && TRACE.isEventEnabled()) {
                SibTr.exception(this, TRACE, exception);
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.exit(this, TRACE, methodName);
        }
    }
}
public class GVRInputManager { /** * Get the first controller of a specified type * @ param type controller type to search for * @ return controller found or null if no controllers of the given type */ public GVRCursorController findCursorController ( GVRControllerType type ) { } }
for ( int index = 0 , size = cache . size ( ) ; index < size ; index ++ ) { int key = cache . keyAt ( index ) ; GVRCursorController controller = cache . get ( key ) ; if ( controller . getControllerType ( ) . equals ( type ) ) { return controller ; } } return null ;
public class ServiceUtils { /** * Returns the part size of the part * @ param getObjectRequest the request to check * @ param s3 the s3 client * @ param partNumber the part number * @ return the part size */ @ SdkInternalApi public static long getPartSize ( GetObjectRequest getObjectRequest , AmazonS3 s3 , int partNumber ) { } }
ValidationUtils . assertNotNull ( s3 , "S3 client" ) ; ValidationUtils . assertNotNull ( getObjectRequest , "GetObjectRequest" ) ; GetObjectMetadataRequest getObjectMetadataRequest = RequestCopyUtils . createGetObjectMetadataRequestFrom ( getObjectRequest ) . withPartNumber ( partNumber ) ; return s3 . getObjectMetadata ( getObjectMetadataRequest ) . getContentLength ( ) ;
public class CommercePriceEntryPersistenceImpl {
    /**
     * Returns a range of all the commerce price entries where groupId = &#63;.
     *
     * Useful when paginating results. Returns a maximum of <code>end - start</code> instances.
     * <code>start</code> and <code>end</code> are not primary keys, they are indexes in the
     * result set. Thus, <code>0</code> refers to the first result in the set. Setting both
     * <code>start</code> and <code>end</code> to {@link QueryUtil#ALL_POS} will return the
     * full result set.
     *
     * @param groupId the group ID
     * @param start the lower bound of the range of commerce price entries
     * @param end the upper bound of the range of commerce price entries (not inclusive)
     * @return the range of matching commerce price entries
     */
    @Override
    public List<CommercePriceEntry> findByGroupId(long groupId, int start, int end) {
        // Delegate to the comparator-aware overload with no explicit ordering.
        return findByGroupId(groupId, start, end, null);
    }
}
public class IPv6Network { /** * Split a network in smaller subnets of a given size . * @ param size size ( expressed as { @ link com . googlecode . ipv6 . IPv6NetworkMask } ) of the subnets * @ return iterator of the splitted subnets . * @ throws IllegalArgumentException if the requested size is bigger than the original size */ public Iterator < IPv6Network > split ( IPv6NetworkMask size ) { } }
if ( size . asPrefixLength ( ) < this . getNetmask ( ) . asPrefixLength ( ) ) throw new IllegalArgumentException ( String . format ( "Can not split a network of size %s in subnets of larger size %s" , this . getNetmask ( ) . asPrefixLength ( ) , size . asPrefixLength ( ) ) ) ; return new IPv6NetworkSplitsIterator ( size ) ;
public class TransparentToolTipDialog { /** * Initializes the components of the tooltip window . */ private void initComponents ( ) { } }
// Avoid warning on Mac OS X when changing the alpha getRootPane ( ) . putClientProperty ( "apple.awt.draggableWindowBackground" , Boolean . FALSE ) ; toolTip = new JToolTip ( ) ; toolTip . addMouseListener ( new TransparencyAdapter ( ) ) ; owner . addComponentListener ( locationAdapter ) ; owner . addAncestorListener ( locationAdapter ) ; getRootPane ( ) . setWindowDecorationStyle ( JRootPane . NONE ) ; // Just in case . . . setFocusable ( false ) ; // Just in case . . . setFocusableWindowState ( false ) ; setContentPane ( toolTip ) ; pack ( ) ; // Seems to help for the very first setVisible ( true ) when window transparency is on
public class NormalizedKeySorter {
    /**
     * Writes a given record to this sort buffer. The written record will be appended and take
     * the last logical position.
     *
     * @param record The record to be written.
     * @return True, if the record was successfully written, false, if the sort buffer was full.
     * @throws IOException Thrown, if an error occurred while serializing the record into the buffers.
     */
    @Override
    public boolean write(T record) throws IOException {
        // check whether we need a new memory segment for the sort index
        if (this.currentSortIndexOffset > this.lastIndexEntryOffset) {
            if (memoryAvailable()) {
                this.currentSortIndexSegment = nextMemorySegment();
                this.sortIndex.add(this.currentSortIndexSegment);
                this.currentSortIndexOffset = 0;
                this.sortIndexBytes += this.segmentSize;
            } else {
                // Buffer full: no more index segments available.
                return false;
            }
        }

        // serialize the record into the data buffers
        try {
            this.serializer.serialize(record, this.recordCollector);
        } catch (EOFException e) {
            // Data buffers exhausted mid-serialization — report "buffer full".
            return false;
        }

        final long newOffset = this.recordCollector.getCurrentOffset();
        // Records at or above the threshold get a tag bit set in their pointer.
        final boolean shortRecord = newOffset - this.currentDataBufferOffset < LARGE_RECORD_THRESHOLD;

        if (!shortRecord && LOG.isDebugEnabled()) {
            LOG.debug("Put a large record ( >" + LARGE_RECORD_THRESHOLD + " into the sort buffer");
        }

        // add the pointer and the normalized key
        this.currentSortIndexSegment.putLong(this.currentSortIndexOffset,
                shortRecord ? this.currentDataBufferOffset : (this.currentDataBufferOffset | LARGE_RECORD_TAG));

        if (this.numKeyBytes != 0) {
            this.comparator.putNormalizedKey(record, this.currentSortIndexSegment,
                    this.currentSortIndexOffset + OFFSET_LEN, this.numKeyBytes);
        }

        // Advance bookkeeping only after the entry has been fully written.
        this.currentSortIndexOffset += this.indexEntrySize;
        this.currentDataBufferOffset = newOffset;
        this.numRecords++;
        return true;
    }
}
public class ServiceOperations { /** * Initialize a service . * The service state is checked < i > before < / i > the operation begins . * This process is < i > not < / i > thread safe . * @ param service a service that must be in the state * { @ link Service . STATE # NOTINITED } * @ param configuration the configuration to initialize the service with * @ throws RuntimeException on a state change failure * @ throws IllegalStateException if the service is in the wrong state */ public static void init ( Service service , HiveConf configuration ) { } }
Service . STATE state = service . getServiceState ( ) ; ensureCurrentState ( state , Service . STATE . NOTINITED ) ; service . init ( configuration ) ;
public class TreeGraphNode { /** * Sets the children of this < code > TreeGraphNode < / code > . If * given < code > null < / code > , this method sets * the node ' s children to the canonical zero - length Tree [ ] array . * @ param children an array of child trees */ @ Override public void setChildren ( Tree [ ] children ) { } }
if ( children == null || children . length == 0 ) { this . children = ZERO_TGN_CHILDREN ; } else { if ( children instanceof TreeGraphNode [ ] ) { this . children = ( TreeGraphNode [ ] ) children ; } else { this . children = new TreeGraphNode [ children . length ] ; for ( int i = 0 ; i < children . length ; i ++ ) { this . children [ i ] = ( TreeGraphNode ) children [ i ] ; } } }
public class CmsXmlContentDefinition { /** * Factory method to unmarshal ( read ) a XML content definition instance from a byte array * that contains XML data . < p > * @ param xmlData the XML data in a byte array * @ param schemaLocation the location from which the XML schema was read ( system id ) * @ param resolver the XML entity resolver to use * @ return a XML content definition instance unmarshalled from the byte array * @ throws CmsXmlException if something goes wrong */ public static CmsXmlContentDefinition unmarshal ( byte [ ] xmlData , String schemaLocation , EntityResolver resolver ) throws CmsXmlException { } }
schemaLocation = translateSchema ( schemaLocation ) ; CmsXmlContentDefinition result = getCachedContentDefinition ( schemaLocation , resolver ) ; if ( result == null ) { // content definition was not found in the cache , unmarshal the XML document result = unmarshalInternal ( CmsXmlUtils . unmarshalHelper ( xmlData , resolver ) , schemaLocation , resolver ) ; } return result ;
public class MessageStoreImpl {
    /**
     * Starts the message store: activates the alarm manager, transitions the state
     * machine STOPPED -> STARTING -> STARTED, builds the membership map, starts the
     * persistent message store, restores spill limits and key generators, and
     * finally starts the cache loader. On any failure the store is stopped again
     * and the exception is recorded and rethrown.
     *
     * (non-Javadoc)
     * @see com.ibm.ws.sib.admin.JsEngineComponent#start(int)
     */
    @Override
    public void start(int arg0) throws Exception {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "start");

        // Venu Liberty change
        // activating the Alarm manager (i.e this triggers Alarm manager's Activate)..
        // Alarm manager has to be ready for Message Store Expirer
        alarmManagerRef.getService();

        // PM44028
        // Reset the health state: we'll assume the database is healthy unless/until some
        // information suggests otherwise.
        _healthState = JsHealthState.getOK();

        // State transition STOPPED -> STARTING, guarded by the start lock.
        synchronized (_startLock) {
            if (_state != MessageStoreState.STATE_STOPPED) {
                throw new IllegalStateException(nls.getFormattedMessage(
                        "INVALID_MSGSTORE_STATE_SIMS0505", new Object[] { _state }, null));
            }
            _state = MessageStoreState.STATE_STARTING;
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "Message store state is : " + _state);
        }

        // Defect 465809
        // reset the startup Exception list
        _startupExceptions.clear();

        // choose map type and size.
        String mapType = getProperty(PROP_ITEM_MAP_TYPE, PROP_ITEM_MAP_TYPE_DEFAULT);
        String strMapSize = getProperty(PROP_ITEM_MAP_SIZE, PROP_ITEM_MAP_SIZE_DEFAULT);
        int mapSize = -1;
        if (null != strMapSize) {
            mapSize = Integer.parseInt(strMapSize);
        }
        if (PROP_ITEM_MAP_TYPE_FASTMAP.equals(mapType)) {
            String strMapParallelism = getProperty(PROP_ITEM_MAP_PARALLELISM, PROP_ITEM_MAP_PARALLELISM_DEFAULT);
            int mapParallelism = -1;
            if (null != strMapParallelism) {
                mapParallelism = Integer.parseInt(strMapParallelism);
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "using itemLinkMap(" + mapSize + "/" + mapParallelism + ")");
            _membershipMap = new ItemLinkMap(mapSize, mapParallelism);
        } else {
            // default — clamp map size into [5, 50], falling back to 20.
            if (mapSize > 50 || mapSize < 5) {
                mapSize = 20;
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "using multiMap(" + mapSize + ")");
            _membershipMap = new MultiHashMap(mapSize);
        }

        // PK57432
        String propValue = getProperty(PROP_JDBC_SPILL_SIZE_MSG_REFS_BY_MSG_SIZE,
                PROP_JDBC_SPILL_SIZE_MSG_REFS_BY_MSG_SIZE_DEFAULT);
        _jdbcSpillSizeMsgRefsByMsgSize = "true".equalsIgnoreCase(propValue);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            SibTr.debug(this, tc, "Using jdbcSpillSizeMsgRefsByMsgSize=" + _jdbcSpillSizeMsgRefsByMsgSize);
        }

        try {
            _itemStorageManager = new ItemStorageManager();
            _itemStorageManager.initialize(this);

            _persistentMessageStore = PersistenceFactory.getPersistentMessageStore(this, _manager, _configuration);
            _persistentMessageStore.start();

            // 246935 — read the spill limits; a malformed value aborts startup.
            try {
                String value = getProperty(PROP_SPILL_UPPER_LIMIT, PROP_SPILL_UPPER_LIMIT_DEFAULT);
                _spillUpperLimit = Integer.parseInt(value);
                value = getProperty(PROP_SPILL_LOWER_LIMIT, PROP_SPILL_LOWER_LIMIT_DEFAULT);
                _spillLowerLimit = Integer.parseInt(value);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Using spillUpperLimit=" + _spillUpperLimit
                            + "; spillLowerLimit=" + _spillLowerLimit);
                // Defect 484799
                value = getProperty(PROP_SPILL_UPPER_SIZE_LIMIT, PROP_SPILL_UPPER_SIZE_LIMIT_DEFAULT);
                _spillUpperSizeLimit = Integer.parseInt(value);
                value = getProperty(PROP_SPILL_LOWER_SIZE_LIMIT, PROP_SPILL_LOWER_SIZE_LIMIT_DEFAULT);
                _spillLowerSizeLimit = Integer.parseInt(value);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Using spillUpperSizeLimit=" + _spillUpperSizeLimit
                            + "; spillLowerSizeLimit=" + _spillLowerSizeLimit);
            } catch (NumberFormatException e) {
                // No FFDC code needed
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Exception " + e + " while parsing spill limits");
                throw e;
            }

            // This portion of start up processing is not interruptable
            synchronized (_startLock) {
                if (_state != MessageStoreState.STATE_STARTING) {
                    throw new IllegalStateException(nls.getFormattedMessage(
                            "INVALID_MSGSTORE_STATE_SIMS0505", new Object[] { _state }, null));
                }

                _manager.restart(_persistentMessageStore);

                // Optionally dump the raw XML representation of the store at startup.
                String xmlOn = getProperty(PROP_DUMP_RAW_XML_ON_STARTUP, null);
                if (null != xmlOn) {
                    FileWriter fw = new FileWriter(xmlOn);
                    FormattedWriter writer = new FormattedWriter(fw);
                    _xmlWriteRawOn(writer, false);
                    writer.flush();
                    writer.close();
                }

                // This is the TransactionFactory for this MessageStore instance.
                // Must call after restart above to ensure that the PM will cast as expected.
                String strMaxTransactionSize = getProperty(PROP_TRANSACTION_SEND_LIMIT,
                        PROP_TRANSACTION_SEND_LIMIT_DEFAULT);
                int maxTransactionSize = Integer.parseInt(strMaxTransactionSize);
                _transactionFactory = new MSTransactionFactory(this, (PersistenceManager) _persistentMessageStore);
                _transactionFactory.setMaximumTransactionSize(maxTransactionSize);

                // If the root item stream is not found, create one.
                Persistable persistable = _persistentMessageStore.readRootPersistable();
                if (null == persistable) {
                    throw new MessageStoreRuntimeException("ROOT_PERSISTABLE_EXCEPTION_SIMS0504");
                }

                _uniqueIdentifierGenerator = _persistentMessageStore.getUniqueKeyGenerator("UniqueIdentifier", 1000000);
                _uniqueLockIDGenerator = _persistentMessageStore.getUniqueKeyGenerator("UniqueLockValue", 500000);
                _tickCountGenerator = _persistentMessageStore.getUniqueKeyGenerator("UniqueTickCount", 500000);

                _rootMembership = new RootMembership(this, persistable);
                _rootMembership.initialize();

                // Start the CacheLoader last
                _cacheLoader.start(_cacheLoaderInterval, _messagingEngine);

                _state = MessageStoreState.STATE_STARTED;
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Message store state is : " + _state);
            }

            // Defect 572575
            // Only read in the value of the flag to allow disabling of the
            // datastore lock after startup processing has completed. This
            // removes the neccessity to handle it being true during initial
            // lock acquisition.
            String strDatastoreLockCanBeDisabled = getProperty(PROP_DATASTORE_LOCK_CAN_BE_DISABLED,
                    PROP_DATASTORE_LOCK_CAN_BE_DISABLED_DEFAULT);
            _datastoreLockCanBeDisabled = Boolean.parseBoolean(strDatastoreLockCanBeDisabled);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "_datastoreLockCanBeDisabled=" + _datastoreLockCanBeDisabled);
        } catch (Exception e) {
            if (!(e.getCause() instanceof NonExistentLogFileException))
                com.ibm.ws.ffdc.FFDCFilter.processException(e,
                        "com.ibm.ws.sib.msgstore.impl.MessageStoreImpl.start", "755", this);
            SibTr.error(tc, "STARTUP_EXCEPTION_SIMS0002", new Object[] { e });
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
                SibTr.event(this, tc, "Exception: ", e);
            // Defect 326323
            // Save our startup exception.
            // Defect 465809
            // Add our startup exception to the saved list
            setStartupException(e);
            // close everything we have opened
            stop(0);
            // 247659
            // Report that we have failed to start cleanly.
            reportLocalError();
            throw new Exception(e);
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "start");
    }
}
public class DefaultGrailsApplication { /** * Get or create the cache of classes for the specified artefact type . * @ param artefactType The name of an artefact type * @ param create Set to true if you want non - existent caches to be created * @ return The cache of classes for the type , or null if no cache exists and create is false */ protected DefaultArtefactInfo getArtefactInfo ( String artefactType , boolean create ) { } }
DefaultArtefactInfo cache = ( DefaultArtefactInfo ) artefactInfo . get ( artefactType ) ; if ( cache == null && create ) { cache = new DefaultArtefactInfo ( ) ; artefactInfo . put ( artefactType , cache ) ; cache . updateComplete ( ) ; } return cache ;
public class ClaimBean {
    /**
     * {@inheritDoc}
     *
     * Releases the creational context, which disposes any dependent objects
     * created for this bean instance.
     */
    @Override
    public void destroy(T instance, CreationalContext<T> creationalContext) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "destroy", instance, creationalContext);
        }
        creationalContext.release();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "destroy");
        }
    }
}
public class DisparityToColorPointCloud { /** * Stereo and intrinsic camera parameters * @ param baseline Stereo baseline ( world units ) * @ param K Intrinsic camera calibration matrix of rectified camera * @ param rectifiedToColor Transform from rectified pixels to the color image pixels . * @ param minDisparity Minimum disparity that ' s computed ( pixels ) * @ param maxDisparity Maximum disparity that ' s computed ( pixels ) */ public void configure ( double baseline , DMatrixRMaj K , DMatrixRMaj rectifiedR , Point2Transform2_F64 rectifiedToColor , int minDisparity , int maxDisparity ) { } }
this . K = K ; ConvertMatrixData . convert ( rectifiedR , this . rectifiedR ) ; this . rectifiedToColor = rectifiedToColor ; this . baseline = ( float ) baseline ; this . focalLengthX = ( float ) K . get ( 0 , 0 ) ; this . focalLengthY = ( float ) K . get ( 1 , 1 ) ; this . centerX = ( float ) K . get ( 0 , 2 ) ; this . centerY = ( float ) K . get ( 1 , 2 ) ; this . minDisparity = minDisparity ; this . rangeDisparity = maxDisparity - minDisparity ;
public class CmsReport { /** * Returns if the report generated an error output . < p > * @ return true if the report generated an error , otherwise false */ public boolean hasError ( ) { } }
A_CmsReportThread thread = OpenCms . getThreadStore ( ) . retrieveThread ( m_paramThread ) ; if ( thread != null ) { return thread . hasError ( ) ; } else { return false ; }
public class InstanceChangeStreamListenerImpl { /** * Starts all streams . */ public void start ( ) { } }
instanceLock . writeLock ( ) . lock ( ) ; try { for ( final Map . Entry < MongoNamespace , NamespaceChangeStreamListener > streamerEntry : nsStreamers . entrySet ( ) ) { streamerEntry . getValue ( ) . start ( ) ; } } finally { instanceLock . writeLock ( ) . unlock ( ) ; }
public class DkimAttributesMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param dkimAttributes the object to marshall; must not be null
     * @param protocolMarshaller sink receiving the marshalled fields
     * @throws SdkClientException if the input is null or marshalling fails
     */
    public void marshall(DkimAttributes dkimAttributes, ProtocolMarshaller protocolMarshaller) {
        if (dkimAttributes == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each attribute under its protocol binding.
            protocolMarshaller.marshall(dkimAttributes.getSigningEnabled(), SIGNINGENABLED_BINDING);
            protocolMarshaller.marshall(dkimAttributes.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(dkimAttributes.getTokens(), TOKENS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PermissionUtil {
    /**
     * Gets the {@code long} representation of the effective permissions allowed for this Role
     * in this Channel. This can be used in conjunction with
     * {@code Permission.getPermissions(long)} to easily get a list of all Permissions that
     * this role can use in this Channel.
     *
     * @param channel The Channel in which permissions are being checked.
     * @param role The Role whose permissions are being checked.
     * @throws IllegalArgumentException if any of the provided parameters is {@code null}
     *         or the provided entities are not from the same guild
     * @return The {@code long} representation of the effective permissions that this Role
     *         has in this Channel
     */
    public static long getEffectivePermission(Channel channel, Role role) {
        Checks.notNull(channel, "Channel");
        Checks.notNull(role, "Role");

        Guild guild = channel.getGuild();
        if (!guild.equals(role.getGuild()))
            throw new IllegalArgumentException("Provided channel and role are not of the same guild!");

        // Base permissions: the role's own raw bits OR'd with @everyone's raw bits.
        long permissions = role.getPermissionsRaw() | guild.getPublicRole().getPermissionsRaw();

        PermissionOverride publicOverride = channel.getPermissionOverride(guild.getPublicRole());
        PermissionOverride roleOverride = channel.getPermissionOverride(role);

        // Apply overrides in order: @everyone first, then the role itself.
        // Within each override, denied bits are cleared before allowed bits are set,
        // so a role-level allow wins over an @everyone-level deny.
        if (publicOverride != null) {
            permissions &= ~publicOverride.getDeniedRaw();
            permissions |= publicOverride.getAllowedRaw();
        }

        if (roleOverride != null) {
            permissions &= ~roleOverride.getDeniedRaw();
            permissions |= roleOverride.getAllowedRaw();
        }

        return permissions;
    }
}
public class NetworkUtils { /** * Tests if a network location is reachable . This is best effort and may give false * not reachable . * @ param endpoint the endpoint to connect to * @ param timeout Open connection will wait for this timeout . * @ param retryCount In case of connection timeout try retryCount times . * @ param retryInterval the interval to retryCount * @ return true if the network location is reachable */ public static boolean isLocationReachable ( InetSocketAddress endpoint , Duration timeout , int retryCount , Duration retryInterval ) { } }
int retryLeft = retryCount ; while ( retryLeft > 0 ) { try ( Socket s = new Socket ( ) ) { s . connect ( endpoint , ( int ) timeout . toMillis ( ) ) ; return true ; } catch ( IOException e ) { } finally { SysUtils . sleep ( retryInterval ) ; retryLeft -- ; } } LOG . log ( Level . FINE , "Failed to connect to: {0}" , endpoint . toString ( ) ) ; return false ;
public class GoogleCloudStorageImpl {
    /**
     * See {@link GoogleCloudStorage#listObjectInfo(String, String, String)} for details about
     * expected behavior.
     */
    @Override
    public List<GoogleCloudStorageItemInfo> listObjectInfo(String bucketName, String objectNamePrefix,
            String delimiter) throws IOException {
        // Delegate to the bounded overload with no cap on the number of results.
        return listObjectInfo(bucketName, objectNamePrefix, delimiter, MAX_RESULTS_UNLIMITED);
    }
}
public class StringUtils { /** * Take a { @ code String } that is a delimited list and convert it into * a { @ code String } array . * < p > A single { @ code delimiter } may consist of more than one character , * but it will still be considered as a single delimiter string , rather * than as bunch of potential delimiter characters , in contrast to * tokenizeToStringArray . * @ param str the input { @ code String } * @ param delimiter the delimiter between elements ( this is a single delimiter , * rather than a bunch individual delimiter characters ) * @ param charsToDelete a set of characters to delete ; useful for deleting unwanted * line breaks : e . g . " \ r \ n \ f " will delete all new lines and line feeds in a { @ code String } * @ return an array of the tokens in the list */ public static String [ ] delimitedListToStringArray ( String str , String delimiter , String charsToDelete ) { } }
if ( str == null ) { return new String [ 0 ] ; } if ( delimiter == null ) { return new String [ ] { str } ; } List < String > result = new ArrayList < > ( ) ; if ( "" . equals ( delimiter ) ) { for ( int i = 0 ; i < str . length ( ) ; i ++ ) { result . add ( deleteAny ( str . substring ( i , i + 1 ) , charsToDelete ) ) ; } } else { int pos = 0 ; int delPos ; while ( ( delPos = str . indexOf ( delimiter , pos ) ) != - 1 ) { result . add ( deleteAny ( str . substring ( pos , delPos ) , charsToDelete ) ) ; pos = delPos + delimiter . length ( ) ; } if ( str . length ( ) > 0 && pos <= str . length ( ) ) { // Add rest of String , but not in case of empty input . result . add ( deleteAny ( str . substring ( pos ) , charsToDelete ) ) ; } } return result . toArray ( EMPTY_STRING_ARRAY ) ;
public class MessageProcessor {
    /**
     * Send the given message to its destinations across the message bus; any response sent back
     * is passed to the given listener. Use this for request-response messages where a non-void
     * response is expected. The response listener should close its associated consumer, since
     * typically only a single response is expected; this is left to the listener in case there
     * are special circumstances where multiple response messages are expected.
     *
     * If the caller merely wants to wait for a single response and process it further, consider
     * using {@link #sendRPC} instead and waiting on its returned Future, rather than supplying
     * a response listener.
     *
     * @param context          information that determines where the message is sent
     * @param basicMessage     the request message to send, with optional headers included
     * @param responseListener the listener that processes the response of the request; it should
     *                         close its associated consumer when appropriate
     * @param headers          headers for the JMS transport that override same-named headers in
     *                         the basic message
     * @return the RPC context which includes information about the handling of the expected response
     * @throws JMSException any error
     * @see org.hawkular.bus.common.ConnectionContextFactory#createProducerConnectionContext(Endpoint)
     */
    public <T extends BasicMessage> RPCConnectionContext sendAndListen(ProducerConnectionContext context,
            BasicMessage basicMessage, BasicMessageListener<T> responseListener, Map<String, String> headers)
            throws JMSException {
        if (context == null) {
            throw new IllegalArgumentException("context must not be null");
        }
        if (basicMessage == null) {
            throw new IllegalArgumentException("message must not be null");
        }
        if (responseListener == null) {
            throw new IllegalArgumentException("response listener must not be null");
        }

        // create the JMS message to be sent
        Message msg = createMessage(context, basicMessage, headers);

        // if the message is correlated with another, put the correlation ID in the Message to be sent
        if (basicMessage.getCorrelationId() != null) {
            msg.setJMSCorrelationID(basicMessage.getCorrelationId().toString());
        }

        // A caller-supplied message ID is discarded; a new one is generated on send.
        if (basicMessage.getMessageId() != null) {
            log.debugf("Non-null message ID [%s] will be ignored and a new one generated",
                    basicMessage.getMessageId());
            basicMessage.setMessageId(null);
        }

        MessageProducer producer = context.getMessageProducer();
        if (producer == null) {
            throw new NullPointerException("Cannot send request-response message - the producer is null");
        }

        // prepare for the response prior to sending the request
        Session session = context.getSession();
        if (session == null) {
            throw new NullPointerException("Cannot send request-response message - the session is null");
        }
        // Responses arrive on a temporary queue dedicated to this request.
        TemporaryQueue responseQueue = session.createTemporaryQueue();
        MessageConsumer responseConsumer = session.createConsumer(responseQueue);

        RPCConnectionContext rpcContext = new RPCConnectionContext();
        rpcContext.copy(context);
        rpcContext.setDestination(responseQueue);
        rpcContext.setMessageConsumer(responseConsumer);
        rpcContext.setRequestMessage(msg);
        rpcContext.setResponseListener(responseListener);

        // Wire the listener to the consumer BEFORE sending, so no response can be missed.
        responseListener.setConsumerConnectionContext(rpcContext);
        responseConsumer.setMessageListener(responseListener);

        msg.setJMSReplyTo(responseQueue);

        // now send the message to the broker
        producer.send(msg);

        // put message ID into the message in case the caller wants to correlate it with another record
        MessageId messageId = new MessageId(msg.getJMSMessageID());
        basicMessage.setMessageId(messageId);

        return rpcContext;
    }
}
public class HttpApiFactory { /** * TODO hacky . . . */ private static HeaderMap wrapMultiMap ( MultiMap headers ) { } }
return new HeaderMap ( ) { private static final long serialVersionUID = - 1406124274678587935L ; @ Override ( ) public String get ( String key ) { return headers . get ( key ) ; } } ;
public class NameServerImpl {
    /**
     * Finds an address for an identifier locally.
     *
     * @param id an identifier
     * @return the Internet socket address registered for the identifier,
     *         or {@code null} if none is registered
     */
    @Override
    public InetSocketAddress lookup(final Identifier id) {
        LOG.log(Level.FINE, "id: {0}", id);
        return idToAddrMap.get(id);
    }
}
public class AbstractValidate { /** * Method without varargs to increase performance */ public < T > T [ ] validIndex ( final T [ ] array , final int index , final String message ) { } }
notNull ( array ) ; if ( index < 0 || index >= array . length ) { failIndexOutOfBounds ( message ) ; } return array ;
public class NamedResolverMap {
    /**
     * Return the string value indicated by the given numeric key.
     *
     * @param key the key of the value to return
     * @param dfl the default value to return if the key is absent
     * @return the string value stored under the given key, or {@code dfl}
     * @throws IllegalArgumentException if the value is present but not a string
     */
    public String getStringOrDefault(int key, @NonNull String dfl) {
        // Look up by the integer-keyed variant; the default is wrapped as the
        // string (third) alternative so a missing key behaves like a string hit.
        Any3<Boolean, Integer, String> value = data.getOrDefault(Any2.<Integer, String>left(key),
                Any3.<Boolean, Integer, String>create3(dfl));
        // get3() only yields a value for the string alternative; any other stored
        // type is reported as an argument error.
        return value.get3().orElseThrow(
                () -> new IllegalArgumentException("expected string argument for param " + key));
    }
}
public class RecordConverter { /** * Convert a set of records in to a matrix * @ param matrix the records ot convert * @ return the matrix for the records */ public static List < List < Writable > > toRecords ( INDArray matrix ) { } }
List < List < Writable > > ret = new ArrayList < > ( ) ; for ( int i = 0 ; i < matrix . rows ( ) ; i ++ ) { ret . add ( RecordConverter . toRecord ( matrix . getRow ( i ) ) ) ; } return ret ;
public class Log { /** * Send a { @ link # Constants . DEBUG } log message . * @ param msg * The message you would like logged . */ public static int d ( String msg ) { } }
// This is a quick check to avoid the expensive stack trace reflection . if ( ! activated ) { return 0 ; } String caller = LogHelper . getCaller ( ) ; if ( caller != null ) { return d ( caller , msg ) ; } return 0 ;
public class DataBindingUtils { /** * Associations both sides of any bidirectional relationships found in the object and source map to bind * @ param object The object * @ param source The source map * @ param persistentEntity The PersistentEntity for the object */ public static void assignBidirectionalAssociations ( Object object , Map source , PersistentEntity persistentEntity ) { } }
if ( source == null ) { return ; } for ( Object key : source . keySet ( ) ) { String propertyName = key . toString ( ) ; if ( propertyName . indexOf ( '.' ) > - 1 ) { propertyName = propertyName . substring ( 0 , propertyName . indexOf ( '.' ) ) ; } PersistentProperty prop = persistentEntity . getPropertyByName ( propertyName ) ; if ( prop != null && prop instanceof OneToOne && ( ( OneToOne ) prop ) . isBidirectional ( ) ) { Object val = source . get ( key ) ; PersistentProperty otherSide = ( ( OneToOne ) prop ) . getInverseSide ( ) ; if ( val != null && otherSide != null ) { MetaClass mc = GroovySystem . getMetaClassRegistry ( ) . getMetaClass ( val . getClass ( ) ) ; try { mc . setProperty ( val , otherSide . getName ( ) , object ) ; } catch ( Exception e ) { // ignore } } } }
public class CmsLoginUI {
    /**
     * Initializes the login view.<p>
     *
     * @param preselectedOu a potential preselected OU; {@code null} falls back to the root OU "/"
     */
    public void showLoginView(String preselectedOu) {
        VerticalLayout content = new VerticalLayout();
        content.setSizeFull();
        m_targetOpener = new CmsLoginTargetOpener(A_CmsUI.get());
        // content.setExpandRatio(m_targetOpener, 0f);
        // Center the login form and let it take up the remaining vertical space.
        content.addComponent(m_loginForm);
        content.setComponentAlignment(m_loginForm, Alignment.MIDDLE_CENTER);
        content.setExpandRatio(m_loginForm, 1);
        setContent(content);
        if (preselectedOu == null) {
            preselectedOu = "/";
        }
        m_loginForm.selectOrgUnit(preselectedOu);
    }
}
public class MessageResolver { /** * ロケールとキーを指定してメッセージを取得する 。 * < p > ロケールに該当する値を取得する 。 * @ param locale ロケール * @ param key メッセージキー * @ return 該当するロケールのメッセージが見つからない場合は 、 デフォルトのリソースから取得する 。 */ public String getMessage ( final MSLocale locale , final String key ) { } }
if ( locale == null ) { return loadResource ( null ) . getMessage ( key ) ; } else { return loadResource ( locale . getLocale ( ) ) . getMessage ( key ) ; }
public class ConfigUtil { /** * Convert the value to a boolean . * @ param value The value instance . * @ return The boolean value . */ public static boolean asBoolean ( Object value ) { } }
if ( value instanceof Boolean ) { return ( Boolean ) value ; } else if ( value instanceof Double || value instanceof Float ) { throw new IncompatibleValueException ( "Unable to convert double value to boolean" ) ; } else if ( value instanceof Number ) { long l = ( ( Number ) value ) . longValue ( ) ; if ( l == 0L ) return false ; if ( l == 1L ) return true ; throw new IncompatibleValueException ( "Unable to convert number " + l + " to boolean" ) ; } else if ( value instanceof CharSequence ) { switch ( value . toString ( ) . toLowerCase ( ) ) { case "0" : case "n" : case "f" : case "no" : case "false" : return false ; case "1" : case "y" : case "t" : case "yes" : case "true" : return true ; default : throw new IncompatibleValueException ( String . format ( "Unable to parse the string \"%s\" to boolean" , Strings . escape ( value . toString ( ) ) ) ) ; } } throw new IncompatibleValueException ( "Unable to convert " + value . getClass ( ) . getSimpleName ( ) + " to a boolean" ) ;
public class StringArrayDecoder {
    /**
     * Converts an object array containing an exported row's values into an array of
     * their string representations.
     *
     * @param generation export generation whose schema determines the field decoders
     * @param tableName  name of the exported table (not referenced by this decoder)
     * @param types      column types for the generation
     * @param names      column names for the generation
     * @param to         optional output array; reused when large enough, otherwise reallocated
     * @param fields     the raw exported row values, including leading metadata columns
     * @return the array of decoded string values
     */
    @Override
    public String[] decode(long generation, String tableName, List<VoltType> types, List<String> names,
            String[] to, Object[] fields) throws RuntimeException {
        Preconditions.checkArgument(fields != null && fields.length > m_firstFieldOffset,
                "null or inapropriately sized export row array");
        /*
         * Builds a list of string formatters that reflects the row
         * column types, cached per generation.
         * NOTE(review): containsKey/put is not atomic; if decode() can run concurrently
         * for the same generation, decoders may be built twice - confirm single-threaded use.
         */
        StringFieldDecoder[] fieldDecoders;
        if (!m_fieldDecoders.containsKey(generation)) {
            int fieldCount = 0;
            Map<String, DecodeType> typeMap = getTypeMap(generation, types, names);
            ImmutableList.Builder<StringFieldDecoder> lb = ImmutableList.builder();
            for (org.voltdb.exportclient.decode.DecodeType dt : typeMap.values()) {
                lb.add(dt.accept(decodingVisitor, fieldCount++, null));
            }
            fieldDecoders = lb.build().toArray(new StringFieldDecoder[0]);
            m_fieldDecoders.put(generation, fieldDecoders);
        } else {
            fieldDecoders = m_fieldDecoders.get(generation);
        }
        // Reuse the caller-provided output array when possible; otherwise allocate a new one.
        if (to == null || to.length < fieldDecoders.length) {
            to = new String[fieldDecoders.length];
        }
        // Skip the leading metadata columns (m_firstFieldOffset) and decode each field in order.
        for (int i = m_firstFieldOffset, j = 0; i < fields.length && j < fieldDecoders.length; ++i, ++j) {
            fieldDecoders[j].decode(to, fields[i]);
        }
        return to;
    }
}
public class JSONObjectException {
    /**
     * Either creates a new mapping exception (when the underlying exception is not one)
     * or augments the given exception with path/reference information. This variant is
     * called when the reference is through an index, which happens with arrays and
     * Collections.
     *
     * @param src     the underlying throwable to wrap or augment
     * @param refFrom the referring object
     * @param index   the index within the referring object
     * @return the wrapped or augmented exception
     */
    public static JSONObjectException wrapWithPath(Throwable src, Object refFrom, int index) {
        // Delegate to the Reference-based overload.
        return wrapWithPath(src, new Reference(refFrom, index));
    }
}
public class CSSColorHelper { /** * Get the passed values as CSS HSL color value * @ param fHue * Hue - is scaled to 0-359 * @ param fSaturation * Saturation - is scaled to 0-100 * @ param fLightness * Lightness - is scaled to 0-100 * @ return The CSS string to use */ @ Nonnull @ Nonempty public static String getHSLColorValue ( final float fHue , final float fSaturation , final float fLightness ) { } }
return new StringBuilder ( 18 ) . append ( CCSSValue . PREFIX_HSL_OPEN ) . append ( getHSLHueValue ( fHue ) ) . append ( ',' ) . append ( getHSLPercentageValue ( fSaturation ) ) . append ( "%," ) . append ( getHSLPercentageValue ( fLightness ) ) . append ( "%" ) . append ( CCSSValue . SUFFIX_HSL_CLOSE ) . toString ( ) ;
public class DeleteXssMatchSetRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteXssMatchSetRequest deleteXssMatchSetRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteXssMatchSetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteXssMatchSetRequest . getXssMatchSetId ( ) , XSSMATCHSETID_BINDING ) ; protocolMarshaller . marshall ( deleteXssMatchSetRequest . getChangeToken ( ) , CHANGETOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ListLens {
    /**
     * Convenience static factory method for creating a lens over a copy of a list. Useful for
     * composition to avoid mutating a list reference.
     *
     * @param <X> the list element type
     * @return a lens that focuses on copies of lists
     */
    public static <X> Lens.Simple<List<X>, List<X>> asCopy() {
        // Getter copies the source list (ArrayList::new); setter replaces the focus wholesale.
        return simpleLens(ArrayList::new, (xs, ys) -> ys);
    }
}
public class DefaultWardenService {
    /**
     * Disposes this service and the services it depends on.
     * The scheduled executor is shut down last, after all dependent services.
     */
    @Override
    public void dispose() {
        super.dispose();
        _alertService.dispose();
        _userService.dispose();
        _metricService.dispose();
        _serviceManagementRecordService.dispose();
        _dashboardService.dispose();
        _shutdownScheduledExecutorService();
    }
}
public class UpdateBuilder {
    /**
     * Updates the fields in the document referred to by this DocumentReference. If the document
     * doesn't exist yet, the update will fail.
     *
     * @param documentReference the DocumentReference to update
     * @param field the first field to set (interpreted as a dot-separated field path)
     * @param value the first value to set
     * @param moreFieldsAndValues String and Object pairs with more fields to be set
     * @return the instance for chaining
     */
    @Nonnull
    public T update(@Nonnull DocumentReference documentReference, @Nonnull String field, @Nullable Object value,
            Object... moreFieldsAndValues) {
        // Precondition.exists(true) is what makes the update fail for missing documents.
        return performUpdate(documentReference, Precondition.exists(true),
                FieldPath.fromDotSeparatedString(field), value, moreFieldsAndValues);
    }
}
public class JTAUserTransactionAdapter {
    /**
     * Sets the transaction timeout on the underlying JTA UserTransaction.
     *
     * @param seconds the transaction timeout in seconds
     * @throws TransactionManagementException if the underlying UserTransaction fails
     * @see javax.batch.spi.TransactionManagerSPI#setTransactionTimeout(int)
     */
    @Override
    public void setTransactionTimeout(int seconds) throws TransactionManagementException {
        logger.entering(CLASSNAME, "setTransactionTimeout", seconds);
        try {
            userTran.setTransactionTimeout(seconds);
        } catch (SystemException e) {
            throw new TransactionManagementException(e);
        }
        // The exit trace is only logged on the success path.
        logger.exiting(CLASSNAME, "setTransactionTimeout");
    }
}
public class DSClientFactory {

    /**
     * Builds driver {@link SocketOptions} from the given connection properties.
     * Each option is applied only when its property is present and non-blank.
     *
     * @param connectionProperties the connection properties
     * @return the configured socket options
     */
    private SocketOptions getSocketOptions(Properties connectionProperties) {
        SocketOptions socketConfig = new SocketOptions();
        // Note: the connect timeout is read via the generic SOCKET_TIMEOUT constant,
        // unlike the other options, which use their literal property names.
        String connectTimeoutMillis = connectionProperties.getProperty(CassandraConstants.SOCKET_TIMEOUT);
        String readTimeoutMillis = connectionProperties.getProperty("readTimeoutMillis");
        String keepAlive = connectionProperties.getProperty("keepAlive");
        String reuseAddress = connectionProperties.getProperty("reuseAddress");
        String soLinger = connectionProperties.getProperty("soLinger");
        String tcpNoDelay = connectionProperties.getProperty("tcpNoDelay");
        String receiveBufferSize = connectionProperties.getProperty("receiveBufferSize");
        String sendBufferSize = connectionProperties.getProperty("sendBufferSize");

        // Integer.parseInt / Boolean.parseBoolean replace the deprecated boxing
        // constructors (new Integer(...), new Boolean(...)); the setters take
        // primitive values, so behavior is unchanged (parseInt still throws
        // NumberFormatException for malformed input, as before).
        if (!StringUtils.isBlank(connectTimeoutMillis)) {
            socketConfig.setConnectTimeoutMillis(Integer.parseInt(connectTimeoutMillis));
        }
        if (!StringUtils.isBlank(readTimeoutMillis)) {
            socketConfig.setReadTimeoutMillis(Integer.parseInt(readTimeoutMillis));
        }
        if (!StringUtils.isBlank(keepAlive)) {
            socketConfig.setKeepAlive(Boolean.parseBoolean(keepAlive));
        }
        if (!StringUtils.isBlank(reuseAddress)) {
            socketConfig.setReuseAddress(Boolean.parseBoolean(reuseAddress));
        }
        if (!StringUtils.isBlank(soLinger)) {
            socketConfig.setSoLinger(Integer.parseInt(soLinger));
        }
        if (!StringUtils.isBlank(tcpNoDelay)) {
            socketConfig.setTcpNoDelay(Boolean.parseBoolean(tcpNoDelay));
        }
        if (!StringUtils.isBlank(receiveBufferSize)) {
            socketConfig.setReceiveBufferSize(Integer.parseInt(receiveBufferSize));
        }
        if (!StringUtils.isBlank(sendBufferSize)) {
            socketConfig.setSendBufferSize(Integer.parseInt(sendBufferSize));
        }
        return socketConfig;
    }
}
public class ClientConnectionTimingsBuilder {
    /**
     * Returns a newly-created {@link ClientConnectionTimings} instance.
     * Logs a warning when a start event was recorded without its matching end event;
     * in that case the corresponding timing pair is reported as -1.
     */
    public ClientConnectionTimings build() {
        if (socketConnectStartTimeMicros > 0 && !socketConnectEndSet) {
            logger.warn("Should call socketConnectEnd() if socketConnectStart() was invoked.");
        }
        if (pendingAcquisitionStartTimeMicros > 0 && !pendingAcquisitionEndSet) {
            logger.warn("Should call pendingAcquisitionEnd() if pendingAcquisitionStart() was invoked.");
        }
        // Positional arguments: (start, duration) pairs for overall acquisition,
        // DNS resolution, socket connect, and pending acquisition, in that order.
        return new ClientConnectionTimings(
                connectionAcquisitionStartTimeMicros,
                System.nanoTime() - connectionAcquisitionStartNanos,
                // DNS resolution reuses the acquisition start timestamp as its start time.
                dnsResolutionEndSet ? connectionAcquisitionStartTimeMicros : -1,
                dnsResolutionEndSet ? dnsResolutionEndNanos - connectionAcquisitionStartNanos : -1,
                socketConnectEndSet ? socketConnectStartTimeMicros : -1,
                socketConnectEndSet ? socketConnectEndNanos - socketConnectStartNanos : -1,
                pendingAcquisitionEndSet ? pendingAcquisitionStartTimeMicros : -1,
                pendingAcquisitionEndSet ? pendingAcquisitionEndNanos - pendingAcquisitionStartNanos : -1);
    }
}
public class AddMetadata { /** * If any operation requires an API key we generate a setter on the builder . * @ return True if any operation requires an API key . False otherwise . */ private static boolean requiresApiKey ( ServiceModel serviceModel ) { } }
return serviceModel . getOperations ( ) . values ( ) . stream ( ) . anyMatch ( Operation :: requiresApiKey ) ;
public class Utilities {
    /**
     * Splits a string containing several concatenated JSON objects into the individual
     * top-level JSON strings. This is needed because Atmosphere sends more than one JSON
     * document in a single response.
     * <p>
     * For example, the input {@code {{test}{test2}}{test3}} produces the list
     * {@code [{{test}{test2}}, {test3}]}.
     *
     * @param combinedJsonString the combined JSON input to be split
     * @return the list of top-level JSON strings
     */
    public static List<String> splitJson(String combinedJsonString) {
        final List<String> jsonObjects = new ArrayList<>();
        final StringBuilder current = new StringBuilder();
        int braceDepth = 0;
        boolean insideString = false;
        char previous = 0;
        boolean first = true;

        for (char ch : combinedJsonString.toCharArray()) {
            // An unescaped quote toggles string mode; braces inside strings must be ignored.
            if (ch == '"' && !first && previous != '\\') {
                insideString = !insideString;
            }
            if (!insideString) {
                if (ch == '{') {
                    braceDepth++;
                } else if (ch == '}') {
                    braceDepth--;
                }
            }
            current.append(ch);
            if (braceDepth == 0 && current.length() != 0) {
                // Discard inter-object filler (e.g. whitespace); only keep real objects.
                if (current.charAt(0) == '{') {
                    jsonObjects.add(current.toString());
                }
                current.setLength(0);
            }
            previous = ch;
            first = false;
        }
        return jsonObjects;
    }
}
public class LogRecordTextHandler {
    /**
     * Stops this handler and closes its output streams.
     * Safe to call more than once: the writer reference is cleared after stopping.
     */
    public void stop() {
        if (this.writer != null) {
            this.writer.stop();
            this.writer.getLogRepositoryManager().stop();
            this.writer = null;
        }
    }
}
public class JSONML {
    /**
     * Parse XML values and store them in a JSONArray.
     *
     * @param x         the XMLTokener containing the source string
     * @param arrayForm true for array form, false for object form
     * @param ja        the JSONArray containing the current tag, or null at the outermost level
     * @return a JSONArray/JSONObject if the value is the outermost tag, or the close-tag
     *         name (String) when returning to the caller from a nested parse
     * @throws JSONException on malformed XML
     */
    private static Object parse(XMLTokener x, boolean arrayForm, JSONArray ja) throws JSONException {
        String attribute;
        char c;
        String closeTag = null;
        int i;
        JSONArray newja = null;
        JSONObject newjo = null;
        Object token;
        String tagName = null;

        // Test for and skip past forms such as comments, CDATA, <!...> and <?...?>.
        while (true) {
            if (!x.more()) {
                throw x.syntaxError("Bad XML");
            }
            token = x.nextContent();
            if (token == XML.LT) {
                token = x.nextToken();
                if (token instanceof Character) {
                    if (token == XML.SLASH) {
                        // Close tag </name>: return the tag name to the caller.
                        token = x.nextToken();
                        if (!(token instanceof String)) {
                            throw new JSONException("Expected a closing name instead of '" + token + "'.");
                        }
                        if (x.nextToken() != XML.GT) {
                            throw x.syntaxError("Misshaped close tag");
                        }
                        return token;
                    } else if (token == XML.BANG) {
                        // <! construct: comment, CDATA section, or declaration.
                        c = x.next();
                        if (c == '-') {
                            if (x.next() == '-') {
                                // <!-- comment --> : skip entirely.
                                x.skipPast("-->");
                            } else {
                                x.back();
                            }
                        } else if (c == '[') {
                            token = x.nextToken();
                            if (token.equals("CDATA") && x.next() == '[') {
                                // CDATA content becomes a child node of the enclosing tag.
                                if (ja != null) {
                                    ja.put(x.nextCDATA());
                                }
                            } else {
                                throw x.syntaxError("Expected 'CDATA['");
                            }
                        } else {
                            // Skip a <! ... > declaration, tracking nested < and >.
                            i = 1;
                            do {
                                token = x.nextMeta();
                                if (token == null) {
                                    throw x.syntaxError("Missing '>' after '<!'.");
                                } else if (token == XML.LT) {
                                    i += 1;
                                } else if (token == XML.GT) {
                                    i -= 1;
                                }
                            } while (i > 0);
                        }
                    } else if (token == XML.QUEST) {
                        // <? processing instruction ?> : skip.
                        x.skipPast("?>");
                    } else {
                        throw x.syntaxError("Misshaped tag");
                    }
                    // Open tag <
                } else {
                    if (!(token instanceof String)) {
                        throw x.syntaxError("Bad tagName '" + token + "'.");
                    }
                    tagName = (String) token;
                    newja = new JSONArray();
                    newjo = new JSONObject();
                    // Register the new element with its parent, per the chosen form.
                    if (arrayForm) {
                        newja.put(tagName);
                        if (ja != null) {
                            ja.put(newja);
                        }
                    } else {
                        newjo.put("tagName", tagName);
                        if (ja != null) {
                            ja.put(newjo);
                        }
                    }
                    token = null;
                    // Collect attribute = value pairs until something other than a name appears.
                    for (;;) {
                        if (token == null) {
                            token = x.nextToken();
                        }
                        if (token == null) {
                            throw x.syntaxError("Misshaped tag");
                        }
                        if (!(token instanceof String)) {
                            break;
                        }
                        // attribute = value
                        attribute = (String) token;
                        if (!arrayForm && ("tagName".equals(attribute) || "childNode".equals(attribute))) {
                            throw x.syntaxError("Reserved attribute.");
                        }
                        token = x.nextToken();
                        if (token == XML.EQ) {
                            token = x.nextToken();
                            if (!(token instanceof String)) {
                                throw x.syntaxError("Missing value");
                            }
                            newjo.accumulate(attribute, XML.stringToValue((String) token));
                            token = null;
                        } else {
                            // Attribute without a value is stored as the empty string.
                            newjo.accumulate(attribute, "");
                        }
                    }
                    if (arrayForm && newjo.length() > 0) {
                        newja.put(newjo);
                    }
                    // Empty tag <.../>
                    if (token == XML.SLASH) {
                        if (x.nextToken() != XML.GT) {
                            throw x.syntaxError("Misshaped tag");
                        }
                        if (ja == null) {
                            if (arrayForm) {
                                return newja;
                            } else {
                                return newjo;
                            }
                        }
                        // Content, between <...> and </...>
                    } else {
                        if (token != XML.GT) {
                            throw x.syntaxError("Misshaped tag");
                        }
                        // Recurse to parse the children; the recursive call returns the close-tag name.
                        closeTag = (String) parse(x, arrayForm, newja);
                        if (closeTag != null) {
                            if (!closeTag.equals(tagName)) {
                                throw x.syntaxError("Mismatched '" + tagName + "' and '" + closeTag + "'");
                            }
                            tagName = null;
                            if (!arrayForm && newja.length() > 0) {
                                newjo.put("childNodes", newja);
                            }
                            if (ja == null) {
                                if (arrayForm) {
                                    return newja;
                                } else {
                                    return newjo;
                                }
                            }
                        }
                    }
                }
            } else {
                // Plain text content between tags is added to the parent, coerced when possible.
                if (ja != null) {
                    ja.put(token instanceof String ? XML.stringToValue((String) token) : token);
                }
            }
        }
    }
}
public class CanonicalStore {
    /**
     * Store the given {@link ClassLoader} indexed by the given {@link ClassLoaderIdentity}.
     * This method can safely be called from multiple threads concurrently, but the
     * ordering of concurrent store operations for the same key is unspecified.
     */
    public void store(K key, V loader) {
        // Clean up stale entries on every put.
        // This should avoid a slow memory leak of reference objects.
        cleanUpStaleEntries();
        // The value is stored via WeakKeyedRef registered with queue q, so cleared
        // entries can later be drained by cleanUpStaleEntries().
        map.put(key, new WeakKeyedRef<K, V>(key, loader, q));
    }
}
public class DateUtil {
    /**
     * Sets the minutes field of the given date. Valid range: 0-59.
     * Delegates to Apache Commons {@code DateUtils.setMinutes}, which returns a
     * new Date instance rather than mutating the input.
     */
    public static Date setMinutes(@NotNull final Date date, int amount) {
        return DateUtils.setMinutes(date, amount);
    }
}
public class SdpParserPipeline {
    /**
     * Adds an attribute parser to the pipeline.
     *
     * @param type   the attribute type the parser handles
     * @param parser the parser to be registered
     */
    public void addAttributeParser(String type, SdpParser<? extends AttributeField> parser) {
        // Registration is guarded by the map's own monitor.
        // NOTE(review): other accesses to attributeParsers should synchronize on
        // the same map for this to be effective - confirm at the call sites.
        synchronized (this.attributeParsers) {
            this.attributeParsers.put(type, parser);
        }
    }
}
public class BitMatrix { /** * < p > Flips the given bit . < / p > * @ param x The horizontal component ( i . e . which column ) * @ param y The vertical component ( i . e . which row ) */ public void flip ( int x , int y ) { } }
int offset = y * rowSize + ( x / 32 ) ; bits [ offset ] ^= 1 << ( x & 0x1f ) ;
public class DoubleHistogram {
    /**
     * Produce textual representation of the value distribution of histogram data by percentile.
     * The distribution is output with exponentially increasing resolution, with each
     * exponentially decreasing half-distance containing <i>dumpTicksPerHalf</i> percentile
     * reporting tick points.
     *
     * @param printStream stream into which the distribution will be output
     * @param percentileTicksPerHalfDistance the number of reporting points per exponentially
     *        decreasing half-distance
     * @param outputValueUnitScalingRatio the scaling factor by which to divide histogram
     *        recorded values units in output
     * @param useCsvFormat output in CSV format if true, otherwise use plain text form
     */
    public void outputPercentileDistribution(final PrintStream printStream,
            final int percentileTicksPerHalfDistance, final Double outputValueUnitScalingRatio,
            final boolean useCsvFormat) {
        // Delegate to the backing integer histogram, folding the integer<->double
        // conversion ratio into the output scaling so values print in caller units.
        integerValuesHistogram.outputPercentileDistribution(printStream, percentileTicksPerHalfDistance,
                outputValueUnitScalingRatio / integerToDoubleValueConversionRatio, useCsvFormat);
    }
}
public class PropertiesInputGridScreen {
    /**
     * Adds the navigation button(s) to the left of the grid row.
     */
    public void addNavButtons() {
        // Only add the delete button when the screen is not in select mode.
        // Note: SCannedBox registers itself via its constructor (side-effect construction).
        if ((m_iDisplayFieldDesc & ScreenConstants.SELECT_MODE) != ScreenConstants.SELECT_MODE)
            new SCannedBox(this.getNextLocation(ScreenConstants.FIRST_SCREEN_LOCATION, ScreenConstants.SET_ANCHOR),
                this, null, ScreenConstants.DEFAULT_DISPLAY, null, null,
                MenuConstants.DELETE, MenuConstants.DELETE, null);
        if (!(this.getParentScreen() instanceof Screen))
            super.addNavButtons(); // Only allow form screen if not sub-window
    }
}
public class ICalComponent {
    /**
     * Removes all sub-components of the given class from this component.
     *
     * @param clazz the class of the components to remove (e.g. "VEvent.class")
     * @param <T> the component class
     * @return the removed components (this list is immutable)
     */
    public <T extends ICalComponent> List<T> removeComponents(Class<T> clazz) {
        List<ICalComponent> removed = components.removeAll(clazz);
        // The cast is safe: removeAll(clazz) only returns instances of clazz.
        return castList(removed, clazz);
    }
}
public class WikipediaXMLReader {
    /**
     * Returns the next RevisionTask.
     *
     * @return the next revision task, or {@code null} when no further article
     *         header can be read
     * @throws ArticleReaderException if the parsing of the input fails
     */
    public Task<Revision> next() throws ArticleReaderException {
        try {
            this.keywords.reset();

            // If the previous task was completed, a new article begins: read its header.
            // Otherwise continue emitting parts of the current article.
            if (this.lastTaskCompleted) {
                this.lastTaskCompleted = false;
                this.taskPartCounter = 1;
                this.taskRevisionCounter = -1;
                if (!readHeader()) {
                    this.lastTaskCompleted = true;
                    return null;
                }
            } else {
                this.taskPartCounter++;
            }

            Task<Revision> task = new Task<Revision>(this.taskHeader, this.taskPartCounter);
            task.add(readRevision());

            int r = read();
            while (r != -1) {
                if (this.keywords.check((char) r)) {
                    switch (this.keywords.getValue()) {
                    case KEY_START_REVISION:
                        // Split oversized tasks: return a partial task and continue
                        // the same article on the next call.
                        if (task.byteSize() >= LIMIT_TASK_SIZE_REVISIONS) {
                            this.lastTaskCompleted = false;
                            if (this.taskPartCounter == 1) {
                                task.setTaskType(TaskTypes.TASK_PARTIAL_FIRST);
                            } else {
                                task.setTaskType(TaskTypes.TASK_PARTIAL);
                            }
                            return task;
                        }
                        task.add(readRevision());
                        break;
                    case KEY_END_PAGE:
                        // Article finished; mark the last part if the article was split.
                        this.lastTaskCompleted = true;
                        if (this.taskPartCounter > 1) {
                            task.setTaskType(TaskTypes.TASK_PARTIAL_LAST);
                        }
                        return task;
                    default:
                        throw new IOException();
                    }
                    this.keywords.reset();
                }
                r = read();
            }
            // Reaching end-of-stream before the page end keyword is an error.
            throw ErrorFactory.createArticleReaderException(
                    ErrorKeys.DELTA_CONSUMERS_TASK_READER_WIKIPEDIAXMLREADER_UNEXPECTED_END_OF_FILE);
        } catch (ArticleReaderException e) {
            throw e;
        } catch (Exception e) {
            throw new ArticleReaderException(e);
        }
    }
}
public class ModelHelper { /** * Helper to convert Segment Id into Segment object . * @ param segment The Segment Id . * @ return New instance of Segment . */ public static final Segment encode ( final SegmentId segment ) { } }
Preconditions . checkNotNull ( segment , "segment" ) ; return new Segment ( segment . getStreamInfo ( ) . getScope ( ) , segment . getStreamInfo ( ) . getStream ( ) , segment . getSegmentId ( ) ) ;
public class RedisClusterClient { /** * Shutdown this client and close all open connections asynchronously . The client should be discarded after calling * shutdown . * @ param quietPeriod the quiet period as described in the documentation * @ param timeout the maximum amount of time to wait until the executor is shutdown regardless if a task was submitted * during the quiet period * @ param timeUnit the unit of { @ code quietPeriod } and { @ code timeout } * @ since 4.4 */ @ Override public CompletableFuture < Void > shutdownAsync ( long quietPeriod , long timeout , TimeUnit timeUnit ) { } }
if ( clusterTopologyRefreshActivated . compareAndSet ( true , false ) ) { ScheduledFuture < ? > scheduledFuture = clusterTopologyRefreshFuture . get ( ) ; try { scheduledFuture . cancel ( false ) ; clusterTopologyRefreshFuture . set ( null ) ; } catch ( Exception e ) { logger . debug ( "Could not cancel Cluster topology refresh" , e ) ; } } return super . shutdownAsync ( quietPeriod , timeout , timeUnit ) ;
public class JmolTools { /** * TODO : move this to AtomInfo class */ public static final String getPdbInfo ( Atom a , boolean printResName ) { } }
String aa3 = "" ; String chain1 = "" ; String res1 = "" ; if ( a != null ) { Group g1 = a . getGroup ( ) ; if ( g1 != null ) { aa3 = g1 . getPDBName ( ) ; res1 = g1 . getResidueNumber ( ) . toString ( ) ; Chain ch1 = g1 . getChain ( ) ; if ( ch1 != null ) chain1 = ch1 . getId ( ) ; } } StringBuffer buf = new StringBuffer ( ) ; if ( printResName ) { if ( ! aa3 . equals ( "" ) ) { buf . append ( "[" ) ; buf . append ( aa3 ) ; buf . append ( "]" ) ; } } if ( ! res1 . equals ( "" ) ) { // let ' s check if there is an insertion code . . . Matcher matcher = inscodePatter . matcher ( res1 ) ; boolean found = matcher . find ( ) ; if ( ! found ) { System . err . println ( "JmolTools: could not parse the residue number string " + res1 ) ; buf . append ( res1 ) ; } else { String residueNumber = matcher . group ( 1 ) ; String insCode = matcher . group ( 2 ) ; buf . append ( residueNumber ) ; if ( insCode != null && ! ( insCode . equals ( "" ) ) ) { buf . append ( "^" ) ; buf . append ( insCode ) ; } } } if ( ! chain1 . equals ( "" ) ) { buf . append ( ":" ) ; buf . append ( chain1 ) ; } return buf . toString ( ) ;
public class CommonTypeFactory {
    /**
     * When first level commodity found, complete types must be built (including all generics)
     * so the entire process is restarted for generic types and using all this final type is built.
     * This is the only place where {@code alwaysIncludeInterfaces} is actually required! When full
     * type is enabled, not only the base class will be resolved, but also all common interfaces
     * (to provide the most complete type without accuracy loss). If the base class is not detected
     * then all detected interfaces are simply returned as a wildcard (of course if only 1 interface
     * is found, it's directly returned without a wildcard wrapper).
     *
     * @param type common root class
     * @param contracts common interfaces
     * @param firstContext first type generics context
     * @param secondContext second type generics context
     * @param alwaysIncludeInterfaces always search for common interfaces
     * @param cache resolution types cache
     * @return final median type for original types
     */
    private static Type buildResultType(final Class<?> type,
                                        final Set<Class<?>> contracts,
                                        final Map<Class<?>, LinkedHashMap<String, Type>> firstContext,
                                        final Map<Class<?>, LinkedHashMap<String, Type>> secondContext,
                                        final boolean alwaysIncludeInterfaces,
                                        final PathsCache cache) {
        // drop contracts already implied by the root class
        removeDuplicateContracts(type, contracts);
        final List<Type> res = new ArrayList<Type>();
        // Object is the implicit root; adding it would add no information
        if (type != Object.class) {
            res.add(buildCommonType(type, firstContext, secondContext, alwaysIncludeInterfaces, cache));
        }
        // resolve interfaces only for root type resolution or if root class cant be found
        if (alwaysIncludeInterfaces || res.isEmpty()) {
            for (Class<?> iface : contracts) {
                // simpler resolution for contracts (only class to prevent cycles)
                res.add(buildCommonType(iface, firstContext, secondContext, false, cache));
            }
            // sort found types by specificity:
            // class -> interface from non java package -> interface with generic -> sort by name
            // in order to always get predictable order and so always the same resolution
            Collections.sort(res, TYPE_COMPARATOR);
        }
        // 0 types -> Object; 1 type -> return it directly; several -> wildcard upper bound
        return res.isEmpty()
                ? Object.class
                : res.size() == 1
                        ? res.iterator().next()
                        : WildcardTypeImpl.upper(res.toArray(new Type[0]));
    }
}
public class UniqueNameGenerator { /** * Reserves the name , useful for keywords . */ public void reserve ( String name ) { } }
checkName ( name ) ; // if this is new if ( reserved . add ( name ) ) { // add it to names , so that generateName will still work for reserved names ( they will just // get suffixes ) . if ( ! names . add ( name ) ) { names . remove ( name ) ; throw new IllegalArgumentException ( "newly reserved name: " + name + " was already used!" ) ; } }
public class BindUploader { /** * Return whether the bind map uses array binds * @ param bindValues the bind map * @ return whether the bind map uses array binds */ public static boolean isArrayBind ( Map < String , ParameterBindingDTO > bindValues ) { } }
if ( bindValues == null || bindValues . size ( ) == 0 ) { return false ; } ParameterBindingDTO bindSample = bindValues . values ( ) . iterator ( ) . next ( ) ; return bindSample . getValue ( ) instanceof List ;
public class JSParser {
    /**
     * Ref ::= "&lt;" &lt;IDENTIFIER&gt; "&gt;"
     *
     * JavaCC-generated production: consumes token 11 ('&lt;'), an IDENTIFIER
     * and token 12 ('&gt;'), registers a fresh JSDynamic placeholder under the
     * identifier's image in the refs map, and returns it.
     *
     * @param refs map of reference names to JSDynamic placeholders
     * @return the JSDynamic registered for the parsed identifier
     * @throws ParseException if the token stream does not match the production
     */
    final public JSDynamic Ref(Map refs) throws ParseException {
        Token tok;
        JSDynamic ans = new JSDynamic();
        jj_consume_token(11);
        tok = jj_consume_token(IDENTIFIER);
        jj_consume_token(12);
        addRef(refs, tok.image, ans);
        // JavaCC return idiom: the guarded return always fires, the trailing
        // throw only satisfies the compiler's definite-return analysis
        {
            if (true) return ans;
        }
        throw new Error("Missing return statement in function");
    }
}
public class TSNE { /** * Squared distance , in projection space . * @ param v1 First vector * @ param v2 Second vector * @ return Squared distance */ protected double sqDist ( double [ ] v1 , double [ ] v2 ) { } }
assert ( v1 . length == v2 . length ) : "Lengths do not agree: " + v1 . length + " " + v2 . length ; double sum = 0 ; for ( int i = 0 ; i < v1 . length ; i ++ ) { final double diff = v1 [ i ] - v2 [ i ] ; sum += diff * diff ; } ++ projectedDistances ; return sum ;
public class KubernetesDeserializer {
    /**
     * Registers a Custom Resource Definition Kind.
     *
     * Delegates to the (apiVersion, kind, class) overload with a null
     * apiVersion, so the mapping applies regardless of API version.
     *
     * @param kind the resource kind to register
     * @param clazz the KubernetesResource subclass to deserialize that kind into
     */
    public static void registerCustomKind(String kind, Class<? extends KubernetesResource> clazz) {
        registerCustomKind(null, kind, clazz);
    }
}
public class BoundingBox { /** * Get the overlapping bounding box with the provided bounding box * @ param boundingBox * bounding box * @ param allowEmpty * allow empty ranges when determining overlap * @ return bounding box * @ since 3.1.0 */ public BoundingBox overlap ( BoundingBox boundingBox , boolean allowEmpty ) { } }
double minLongitude = Math . max ( getMinLongitude ( ) , boundingBox . getMinLongitude ( ) ) ; double maxLongitude = Math . min ( getMaxLongitude ( ) , boundingBox . getMaxLongitude ( ) ) ; double minLatitude = Math . max ( getMinLatitude ( ) , boundingBox . getMinLatitude ( ) ) ; double maxLatitude = Math . min ( getMaxLatitude ( ) , boundingBox . getMaxLatitude ( ) ) ; BoundingBox overlap = null ; if ( ( minLongitude < maxLongitude && minLatitude < maxLatitude ) || ( allowEmpty && minLongitude <= maxLongitude && minLatitude <= maxLatitude ) ) { overlap = new BoundingBox ( minLongitude , minLatitude , maxLongitude , maxLatitude ) ; } return overlap ;
public class ConfigDocumentFactory { /** * Parses a Reader into a ConfigDocument instance . * @ param reader * the reader to parse * @ param options * parse options to control how the reader is interpreted * @ return the parsed configuration * @ throws com . typesafe . config . ConfigException on IO or parse errors */ public static ConfigDocument parseReader ( Reader reader , ConfigParseOptions options ) { } }
return Parseable . newReader ( reader , options ) . parseConfigDocument ( ) ;
public class WebSocketUtils {
    /**
     * Transfer the data from the source to the sink using the given through buffer to pass
     * data through.
     *
     * Copies up to {@code count} bytes. If the very first read produces no data, the
     * read's own result (0, or -1 for EOF) is returned; otherwise the number of bytes
     * actually written to the sink is returned, which may be less than {@code count}.
     *
     * @param source channel to read from
     * @param count maximum number of bytes to transfer
     * @param throughBuffer scratch buffer; its contents are clobbered
     * @param sink channel to write to
     * @return bytes transferred, or the first read result (&lt;= 0) when nothing was copied
     * @throws IOException on read or write failure
     */
    public static long transfer(final ReadableByteChannel source, final long count, final ByteBuffer throughBuffer, final WritableByteChannel sink) throws IOException {
        long total = 0L;
        while (total < count) {
            throughBuffer.clear();
            // cap the read so we never pull more than the remaining quota
            if (count - total < throughBuffer.remaining()) {
                throughBuffer.limit((int) (count - total));
            }
            try {
                long res = source.read(throughBuffer);
                if (res <= 0) {
                    // EOF or empty read: report partial total, or the raw read result
                    return total == 0L ? res : total;
                }
            } finally {
                // always flip so the buffer is ready for writing
                // (the finally runs before the return above takes effect)
                throughBuffer.flip();
            }
            // drain the buffer completely into the sink
            while (throughBuffer.hasRemaining()) {
                long res = sink.write(throughBuffer);
                if (res <= 0) {
                    // sink refused data: stop and report what was written so far
                    return total;
                }
                total += res;
            }
        }
        return total;
    }
}