signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class OpenPgpContact { /** * Return a { @ link Set } of { @ link OpenPgpV4Fingerprint } s of all keys of the contact , which have the trust state * { @ link OpenPgpStore . Trust # untrusted } . * @ return untrusted fingerprints * @ throws IOException IO error * @ throws PGPException PGP error */ public Set < OpenPgpV4Fingerprint > getUntrustedFingerprints ( ) throws IOException , PGPException { } }
return getFingerprintsOfKeysWithState ( getAnyPublicKeys ( ) , OpenPgpTrustStore . Trust . untrusted ) ;
public class Entity { /** * Returns a list of this entity instances with null from and count and with * the given filter * @ param ctx The context * @ param filter The filter * @ return The list * @ throws PMException */ public List < ? > getList ( PMContext ctx , EntityFilter filter ) throws PMException { } }
return getList ( ctx , filter , null , null , null ) ;
public class CommerceWarehouseItemUtil { /** * Returns the last commerce warehouse item in the ordered set where commerceWarehouseId = & # 63 ; . * @ param commerceWarehouseId the commerce warehouse ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce warehouse item , or < code > null < / code > if a matching commerce warehouse item could not be found */ public static CommerceWarehouseItem fetchByCommerceWarehouseId_Last ( long commerceWarehouseId , OrderByComparator < CommerceWarehouseItem > orderByComparator ) { } }
return getPersistence ( ) . fetchByCommerceWarehouseId_Last ( commerceWarehouseId , orderByComparator ) ;
public class SyncCommand { /** * Serializes the given object into the given writer . The following format will * be used to serialize objects . The first two characters are the type index , see * typeMap above . After that , a single digit that indicates the length of the following * length field follows . After that , the length field is serialized , followed by the * string value of the given object and a space character for human readability . * @ param outputStream * @ param obj */ public static void serializeData ( DataOutputStream outputStream , byte [ ] data ) throws IOException { } }
outputStream . writeInt ( data . length ) ; outputStream . write ( data ) ; outputStream . flush ( ) ;
public class V1InstanceCreator { /** * Create a new schedule entity with a name , iteration length , and iteration gap * @ param name Name of the new schedule * @ param iterationLength The duration an iteration will last in this schedule * @ param iterationGap The duration between iterations in this schedule . * @ return A newly minted Schedule that exists in the VersionOne system . */ public Schedule schedule ( String name , Duration iterationLength , Duration iterationGap ) { } }
return schedule ( name , iterationLength , iterationGap , null ) ;
public class ChronoHistory { /** * / * [ deutsch ] * < p > Rekonstruiert die Kalenderhistorie von der angegebenen Beschreibung . < / p > * @ param variant description as defined in { @ link # getVariant ( ) } * @ return ChronoHistory * @ throws IllegalArgumentException if the variant cannot be interpreted as calendar history * @ since 3.36/4.31 */ public static ChronoHistory from ( String variant ) { } }
if ( ! variant . startsWith ( "historic-" ) ) { throw new IllegalArgumentException ( "Variant does not start with \"historic-\": " + variant ) ; } String [ ] parts = variant . substring ( 9 ) . split ( ":" ) ; if ( parts . length == 0 ) { throw new IllegalArgumentException ( "Invalid variant description." ) ; } HistoricVariant hv = HistoricVariant . valueOf ( parts [ 0 ] ) ; ChronoHistory history ; int startIndex = 2 ; switch ( hv ) { case PROLEPTIC_GREGORIAN : return ChronoHistory . PROLEPTIC_GREGORIAN ; case PROLEPTIC_JULIAN : return ChronoHistory . PROLEPTIC_JULIAN ; case PROLEPTIC_BYZANTINE : return ChronoHistory . PROLEPTIC_BYZANTINE ; case SWEDEN : history = ChronoHistory . ofSweden ( ) ; startIndex = 1 ; break ; case INTRODUCTION_ON_1582_10_15 : if ( ! getGregorianCutOverDate ( parts , variant ) . equals ( PlainDate . of ( 1582 , 10 , 15 ) ) ) { throw new IllegalArgumentException ( "Inconsistent cutover date: " + variant ) ; } history = ChronoHistory . ofFirstGregorianReform ( ) ; break ; case SINGLE_CUTOVER_DATE : PlainDate cutover = getGregorianCutOverDate ( parts , variant ) ; history = ChronoHistory . ofGregorianReform ( cutover ) ; break ; default : throw new UnsupportedOperationException ( hv . name ( ) ) ; } String [ ] a = parts [ startIndex ] . split ( "=" ) ; if ( a [ 0 ] . equals ( "ancient-julian-leap-years" ) ) { String ajly = a [ 1 ] . substring ( 1 , a [ 1 ] . length ( ) - 1 ) ; if ( ! ajly . isEmpty ( ) ) { String [ ] nums = ajly . split ( "," ) ; int [ ] bcYears = new int [ nums . length ] ; for ( int i = 0 ; i < nums . length ; i ++ ) { bcYears [ i ] = 1 - Integer . parseInt ( nums [ i ] ) ; } history = history . with ( AncientJulianLeapYears . of ( bcYears ) ) ; } } String [ ] b = parts [ startIndex + 1 ] . split ( "=" ) ; if ( b [ 0 ] . equals ( "new-year-strategy" ) ) { String desc = b [ 1 ] . substring ( 1 , b [ 1 ] . length ( ) - 1 ) ; String [ ] rules = desc . split ( "," ) ; NewYearStrategy nys = null ; for ( int i = 0 ; i < rules . 
length ; i ++ ) { String [ ] rule = rules [ i ] . split ( "->" ) ; NewYearRule nyr = NewYearRule . valueOf ( rule [ 0 ] ) ; int annoDomini = ( rule . length == 2 ? Integer . parseInt ( rule [ 1 ] ) : Integer . MAX_VALUE ) ; if ( nys == null ) { if ( ( nyr == NewYearRule . BEGIN_OF_JANUARY ) && ( annoDomini == 567 ) ) { continue ; } nys = nyr . until ( annoDomini ) ; } else { nys = nys . and ( nyr . until ( annoDomini ) ) ; } } history = history . with ( nys ) ; } String [ ] c = parts [ startIndex + 2 ] . split ( "=" ) ; if ( c [ 0 ] . equals ( "era-preference" ) ) { String desc = c [ 1 ] . substring ( 1 , c [ 1 ] . length ( ) - 1 ) ; if ( ! desc . equals ( "default" ) ) { String [ ] prefs = desc . split ( "," ) ; try { HistoricEra era = HistoricEra . valueOf ( prefs [ 0 ] . substring ( 5 ) ) ; PlainDate start = Iso8601Format . parseDate ( prefs [ 1 ] . substring ( 7 ) ) ; PlainDate end = Iso8601Format . parseDate ( prefs [ 2 ] . substring ( 5 ) ) ; switch ( era ) { case HISPANIC : history = history . with ( EraPreference . hispanicBetween ( start , end ) ) ; break ; case BYZANTINE : history = history . with ( EraPreference . byzantineBetween ( start , end ) ) ; break ; case AB_URBE_CONDITA : history = history . with ( EraPreference . abUrbeConditaBetween ( start , end ) ) ; break ; default : throw new IllegalArgumentException ( "BC/AD not allowed as era preference: " + variant ) ; } } catch ( ParseException pe ) { throw new IllegalArgumentException ( "Invalid date syntax: " + variant ) ; } } } return history ;
public class Ordering { /** * Returns the least of the specified values according to this ordering . If there are multiple * least values , the first of those is returned . The iterator will be left exhausted : its { @ code * hasNext ( ) } method will return { @ code false } . * < p > < b > Java 8 users : < / b > Continue to use this method for now . After the next release of Guava , * use { @ code Streams . stream ( iterator ) . min ( thisComparator ) . get ( ) } instead ( but note that it does * not guarantee which tied minimum element is returned ) . * @ param iterator the iterator whose minimum element is to be determined * @ throws NoSuchElementException if { @ code iterator } is empty * @ throws ClassCastException if the parameters are not < i > mutually comparable < / i > under this * ordering . * @ since 11.0 */ @ CanIgnoreReturnValue // TODO ( kak ) : Consider removing this public < E extends T > E min ( Iterator < E > iterator ) { } }
// let this throw NoSuchElementException as necessary E minSoFar = iterator . next ( ) ; while ( iterator . hasNext ( ) ) { minSoFar = min ( minSoFar , iterator . next ( ) ) ; } return minSoFar ;
public class CasServerDiscoveryProfileEndpoint { /** * Discovery . * @ return the map */ @ GetMapping @ ResponseBody public Map < String , Object > discovery ( ) { } }
val results = new HashMap < String , Object > ( ) ; results . put ( "profile" , casServerProfileRegistrar . getProfile ( ) ) ; return results ;
public class AbstractCodeElementExtractor { /** * Replies the assignment component with the given nazme in the given grammar component . * @ param grammarComponent the component to explore . * @ param assignmentName the name of the assignment to search for . * @ return the assignment component . */ protected static Action findAction ( EObject grammarComponent , String assignmentName ) { } }
for ( final Action action : GrammarUtil . containedActions ( grammarComponent ) ) { if ( GrammarUtil . isAssignedAction ( action ) ) { if ( Objects . equals ( assignmentName , action . getFeature ( ) ) ) { return action ; } } } return null ;
public class StorageAccountsInner { /** * Gets the first page of Azure Storage accounts , if any , linked to the specified Data Lake Analytics account . The response includes a link to the next page , if any . * @ param resourceGroupName The name of the Azure resource group . * @ param accountName The name of the Data Lake Analytics account . * @ param filter The OData filter . Optional . * @ param top The number of items to return . Optional . * @ param skip The number of items to skip over before returning elements . Optional . * @ param select OData Select statement . Limits the properties on each entry to just those requested , e . g . Categories ? $ select = CategoryName , Description . Optional . * @ param orderby OrderBy clause . One or more comma - separated expressions with an optional " asc " ( the default ) or " desc " depending on the order you ' d like the values sorted , e . g . Categories ? $ orderby = CategoryName desc . Optional . * @ param count The Boolean value of true or false to request a count of the matching resources included with the resources in the response , e . g . Categories ? $ count = true . Optional . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < StorageAccountInformationInner > > listByAccountAsync ( final String resourceGroupName , final String accountName , final String filter , final Integer top , final Integer skip , final String select , final String orderby , final Boolean count , final ListOperationCallback < StorageAccountInformationInner > serviceCallback ) { } }
return AzureServiceFuture . fromPageResponse ( listByAccountSinglePageAsync ( resourceGroupName , accountName , filter , top , skip , select , orderby , count ) , new Func1 < String , Observable < ServiceResponse < Page < StorageAccountInformationInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < StorageAccountInformationInner > > > call ( String nextPageLink ) { return listByAccountNextSinglePageAsync ( nextPageLink ) ; } } , serviceCallback ) ;
public class AbstractColorPickerPreference { /** * Creates and returns the layout params of the view , which is used to show a preview of the * preference ' s color , depending on the preference ' s properties . * @ return The layout params , which have been created , as an instance of the class { @ link * LayoutParams } */ private LayoutParams createPreviewLayoutParams ( ) { } }
LayoutParams layoutParams = new LayoutParams ( getPreviewSize ( ) , getPreviewSize ( ) ) ; layoutParams . gravity = Gravity . CENTER_VERTICAL ; return layoutParams ;
public class ValueEnforcer { /** * Check if * < code > nValue & ge ; nLowerBoundInclusive & amp ; & amp ; nValue & le ; nUpperBoundInclusive < / code > * @ param fValue * Value * @ param sName * Name * @ param fLowerBoundInclusive * Lower bound * @ param fUpperBoundInclusive * Upper bound * @ return The value */ public static float isBetweenInclusive ( final float fValue , final String sName , final float fLowerBoundInclusive , final float fUpperBoundInclusive ) { } }
if ( isEnabled ( ) ) return isBetweenInclusive ( fValue , ( ) -> sName , fLowerBoundInclusive , fUpperBoundInclusive ) ; return fValue ;
public class BusinessReportScheduleMarshaller { /** * Marshall the given parameter object . */ public void marshall ( BusinessReportSchedule businessReportSchedule , ProtocolMarshaller protocolMarshaller ) { } }
if ( businessReportSchedule == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( businessReportSchedule . getScheduleArn ( ) , SCHEDULEARN_BINDING ) ; protocolMarshaller . marshall ( businessReportSchedule . getScheduleName ( ) , SCHEDULENAME_BINDING ) ; protocolMarshaller . marshall ( businessReportSchedule . getS3BucketName ( ) , S3BUCKETNAME_BINDING ) ; protocolMarshaller . marshall ( businessReportSchedule . getS3KeyPrefix ( ) , S3KEYPREFIX_BINDING ) ; protocolMarshaller . marshall ( businessReportSchedule . getFormat ( ) , FORMAT_BINDING ) ; protocolMarshaller . marshall ( businessReportSchedule . getContentRange ( ) , CONTENTRANGE_BINDING ) ; protocolMarshaller . marshall ( businessReportSchedule . getRecurrence ( ) , RECURRENCE_BINDING ) ; protocolMarshaller . marshall ( businessReportSchedule . getLastBusinessReport ( ) , LASTBUSINESSREPORT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Graphics { /** * Sets a perspective matrix defined through the parameters . Works like * glFrustum , except it wipes out the current perspective matrix rather * than multiplying itself with it . * @ param left * left coordinate of the clipping plane * @ param right * right coordinate of the clipping plane * @ param bottom * bottom coordinate of the clipping plane * @ param top * top coordinate of the clipping plane * @ param near * near component of the clipping plane * @ param far * far component of the clipping plane */ public void setFrustum ( double left , double right , double bottom , double top , double near , double far ) { } }
matrixMode ( MatrixMode . PROJECTION ) ; resetMatrix ( ) ; gl . glFrustum ( left , right , bottom , top , near , far ) ; matrixMode ( MatrixMode . MODELVIEW ) ; resetMatrix ( ) ;
public class BulkheadExports { /** * Creates a new instance of { @ link BulkheadExports } with specified metrics names prefix and * { @ link Iterable } of bulkheads . * @ param prefix the prefix of metrics names * @ param bulkheads the bulkheads */ public static BulkheadExports ofIterable ( String prefix , Iterable < Bulkhead > bulkheads ) { } }
return new BulkheadExports ( prefix , bulkheads ) ;
public class BaasUser { /** * Logouts the user from the server . After this call completes no current user * is available . { @ link BaasUser # current ( ) } will return < code > null < / code > . * @ param handler an handler to be invoked upon completion of the request * @ return a { @ link com . baasbox . android . RequestToken } to handle the async request */ public RequestToken logout ( BaasHandler < Void > handler ) { } }
return logout ( null , RequestOptions . DEFAULT , handler ) ;
public class HashExtensions { /** * Hashes the given { @ link String } object with the given parameters . * @ param hashIt * the hash it * @ param salt * the salt * @ param hashAlgorithm * the hash algorithm * @ param charset * the charset * @ return the generated { @ link String } object * @ throws NoSuchAlgorithmException * is thrown if instantiation of the MessageDigest object fails . */ public static String hash ( final String hashIt , final String salt , final HashAlgorithm hashAlgorithm , final Charset charset ) throws NoSuchAlgorithmException { } }
final MessageDigest messageDigest = MessageDigest . getInstance ( hashAlgorithm . getAlgorithm ( ) ) ; messageDigest . reset ( ) ; messageDigest . update ( salt . getBytes ( charset ) ) ; return new String ( messageDigest . digest ( hashIt . getBytes ( charset ) ) , charset ) ;
public class TaskTracker { /** * Pick a task to kill to free up memory / disk - space * @ param tasksToExclude tasks that are to be excluded while trying to find a * task to kill . If null , all runningTasks will be searched . * @ return the task to kill or null , if one wasn ' t found */ synchronized TaskInProgress findTaskToKill ( List < TaskAttemptID > tasksToExclude ) { } }
TaskInProgress killMe = null ; for ( Iterator it = runningTasks . values ( ) . iterator ( ) ; it . hasNext ( ) ; ) { TaskInProgress tip = ( TaskInProgress ) it . next ( ) ; if ( tasksToExclude != null && tasksToExclude . contains ( tip . getTask ( ) . getTaskID ( ) ) ) { // exclude this task continue ; } if ( ( tip . getRunState ( ) == TaskStatus . State . RUNNING || tip . getRunState ( ) == TaskStatus . State . COMMIT_PENDING ) && ! tip . wasKilled ) { if ( killMe == null ) { killMe = tip ; } else if ( ! tip . getTask ( ) . isMapTask ( ) ) { // reduce task , give priority if ( killMe . getTask ( ) . isMapTask ( ) || ( tip . getTask ( ) . getProgress ( ) . get ( ) < killMe . getTask ( ) . getProgress ( ) . get ( ) ) ) { killMe = tip ; } } else if ( killMe . getTask ( ) . isMapTask ( ) && tip . getTask ( ) . getProgress ( ) . get ( ) < killMe . getTask ( ) . getProgress ( ) . get ( ) ) { // map task , only add if the progress is lower killMe = tip ; } } } return killMe ;
public class ByteSequenceIterator { /** * Descends to a given node , adds its arcs to the stack to be traversed . */ private void pushNode ( int node ) { } }
// Expand buffers if needed . if ( position == arcs . length ) { arcs = Arrays . copyOf ( arcs , arcs . length + EXPECTED_MAX_STATES ) ; } arcs [ position ++ ] = fsa . getFirstArc ( node ) ;
public class TimeOfDay { /** * Returns a copy of this time with the value of the specified field increased , * wrapping to what would be a new day if required . * If the addition is zero , then < code > this < / code > is returned . * These three lines are equivalent : * < pre > * TimeOfDay added = tod . withFieldAdded ( DurationFieldType . minutes ( ) , 6 ) ; * TimeOfDay added = tod . plusMinutes ( 6 ) ; * TimeOfDay added = tod . minuteOfHour ( ) . addToCopy ( 6 ) ; * < / pre > * @ param fieldType the field type to add to , not null * @ param amount the amount to add * @ return a copy of this instance with the field updated * @ throws IllegalArgumentException if the value is null or invalid * @ throws ArithmeticException if the new datetime exceeds the capacity */ public TimeOfDay withFieldAdded ( DurationFieldType fieldType , int amount ) { } }
int index = indexOfSupported ( fieldType ) ; if ( amount == 0 ) { return this ; } int [ ] newValues = getValues ( ) ; newValues = getField ( index ) . addWrapPartial ( this , index , newValues , amount ) ; return new TimeOfDay ( this , newValues ) ;
public class InternalXtextParser { /** * InternalXtext . g : 3646:1 : ruleEnumLiterals returns [ EObject current = null ] : ( this _ EnumLiteralDeclaration _ 0 = ruleEnumLiteralDeclaration ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) ) + ) ? ) ; */ public final EObject ruleEnumLiterals ( ) throws RecognitionException { } }
EObject current = null ; Token otherlv_2 = null ; EObject this_EnumLiteralDeclaration_0 = null ; EObject lv_elements_3_0 = null ; enterRule ( ) ; try { // InternalXtext . g : 3652:2 : ( ( this _ EnumLiteralDeclaration _ 0 = ruleEnumLiteralDeclaration ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) ) + ) ? ) ) // InternalXtext . g : 3653:2 : ( this _ EnumLiteralDeclaration _ 0 = ruleEnumLiteralDeclaration ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) ) + ) ? ) { // InternalXtext . g : 3653:2 : ( this _ EnumLiteralDeclaration _ 0 = ruleEnumLiteralDeclaration ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) ) + ) ? ) // InternalXtext . g : 3654:3 : this _ EnumLiteralDeclaration _ 0 = ruleEnumLiteralDeclaration ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) ) + ) ? { newCompositeNode ( grammarAccess . getEnumLiteralsAccess ( ) . getEnumLiteralDeclarationParserRuleCall_0 ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_25 ) ; this_EnumLiteralDeclaration_0 = ruleEnumLiteralDeclaration ( ) ; state . _fsp -- ; current = this_EnumLiteralDeclaration_0 ; afterParserOrEnumRuleCall ( ) ; // InternalXtext . g : 3662:3 : ( ( ) ( otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) ) + ) ? int alt77 = 2 ; int LA77_0 = input . LA ( 1 ) ; if ( ( LA77_0 == 30 ) ) { alt77 = 1 ; } switch ( alt77 ) { case 1 : // InternalXtext . g : 3663:4 : ( ) ( otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) ) + { // InternalXtext . g : 3663:4 : ( ) // InternalXtext . g : 3664:5: { current = forceCreateModelElementAndAdd ( grammarAccess . getEnumLiteralsAccess ( ) . getAlternativesElementsAction_1_0 ( ) , current ) ; } // InternalXtext . g : 3670:4 : ( otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) ) + int cnt76 = 0 ; loop76 : do { int alt76 = 2 ; int LA76_0 = input . 
LA ( 1 ) ; if ( ( LA76_0 == 30 ) ) { alt76 = 1 ; } switch ( alt76 ) { case 1 : // InternalXtext . g : 3671:5 : otherlv _ 2 = ' | ' ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) { otherlv_2 = ( Token ) match ( input , 30 , FollowSets000 . FOLLOW_13 ) ; newLeafNode ( otherlv_2 , grammarAccess . getEnumLiteralsAccess ( ) . getVerticalLineKeyword_1_1_0 ( ) ) ; // InternalXtext . g : 3675:5 : ( ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) ) // InternalXtext . g : 3676:6 : ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) { // InternalXtext . g : 3676:6 : ( lv _ elements _ 3_0 = ruleEnumLiteralDeclaration ) // InternalXtext . g : 3677:7 : lv _ elements _ 3_0 = ruleEnumLiteralDeclaration { newCompositeNode ( grammarAccess . getEnumLiteralsAccess ( ) . getElementsEnumLiteralDeclarationParserRuleCall_1_1_1_0 ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_25 ) ; lv_elements_3_0 = ruleEnumLiteralDeclaration ( ) ; state . _fsp -- ; if ( current == null ) { current = createModelElementForParent ( grammarAccess . getEnumLiteralsRule ( ) ) ; } add ( current , "elements" , lv_elements_3_0 , "org.eclipse.xtext.Xtext.EnumLiteralDeclaration" ) ; afterParserOrEnumRuleCall ( ) ; } } } break ; default : if ( cnt76 >= 1 ) break loop76 ; EarlyExitException eee = new EarlyExitException ( 76 , input ) ; throw eee ; } cnt76 ++ ; } while ( true ) ; } break ; } } } leaveRule ( ) ; } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class BufferUtils { /** * Checks if the given byte array starts with a constant sequence of bytes of the given value * and length . * @ param value the value to check for * @ param len the target length of the sequence * @ param arr the byte array to check * @ return true if the byte array has a prefix of length { @ code len } that is a constant * sequence of bytes of the given value */ public static boolean equalConstantByteArray ( byte value , int len , byte [ ] arr ) { } }
if ( arr == null || arr . length != len ) { return false ; } for ( int k = 0 ; k < len ; k ++ ) { if ( arr [ k ] != value ) { return false ; } } return true ;
public class ParserString { /** * Stellt den internen Zeiger an den Anfang der naechsten Zeile , gibt zurueck ob eine weitere Zeile * existiert oder ob es bereits die letzte Zeile war . * @ return Existiert eine weitere Zeile . */ public boolean nextLine ( ) { } }
while ( isValidIndex ( ) && text [ pos ] != '\n' ) { next ( ) ; } if ( isValidIndex ( ) && text [ pos ] == '\n' ) { next ( ) ; return isValidIndex ( ) ; } return false ;
public class DescribeTrainingJobResult { /** * An array of < code > Channel < / code > objects that describes each data input channel . * @ param inputDataConfig * An array of < code > Channel < / code > objects that describes each data input channel . */ public void setInputDataConfig ( java . util . Collection < Channel > inputDataConfig ) { } }
if ( inputDataConfig == null ) { this . inputDataConfig = null ; return ; } this . inputDataConfig = new java . util . ArrayList < Channel > ( inputDataConfig ) ;
public class AbstractAsyncFuture { /** * Runs a callback on the given listener safely . We cannot allow misbehaved application * callback code to spoil the notification of subsequent listeners or other tidy - up work , * so the callbacks have to be tightly wrappered in an exception hander that ignores the * error and continues with the next callback . This could lead to creative * problems in the app code , so callbacks must be written carefully . * @ param listener * @ param future * @ param userState */ protected void invokeCallback ( ICompletionListener listener , AbstractAsyncFuture future , Object userState ) { } }
try { ExecutorService executorService = CHFWBundle . getExecutorService ( ) ; if ( null == executorService ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Unable to schedule callback, using this thread" ) ; } listener . futureCompleted ( future , userState ) ; } else { if ( null == this . myCallback ) { this . myCallback = new WorkCallback ( listener , userState ) ; } else { this . myCallback . myListener = listener ; this . myCallback . myState = userState ; } executorService . execute ( this . myCallback ) ; } } catch ( Throwable problem ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Error invoking callback, exception: " + problem + " : " + problem . getMessage ( ) ) ; } FFDCFilter . processException ( problem , getClass ( ) . getName ( ) + ".invokeCallback" , "182" , this ) ; }
public class BaseHllSketch { /** * Gets the current ( approximate ) Relative Error ( RE ) asymptotic values given several * parameters . This is used primarily for testing . * @ param upperBound return the RE for the Upper Bound , otherwise for the Lower Bound . * @ param unioned set true if the sketch is the result of a union operation . * @ param lgConfigK the configured value for the sketch . * @ param numStdDev the given number of Standard Deviations . This must be an integer between * 1 and 3 , inclusive . * < a href = " { @ docRoot } / resources / dictionary . html # numStdDev " > Number of Standard Deviations < / a > * @ return the current ( approximate ) RelativeError */ public double getRelErr ( final boolean upperBound , final boolean unioned , final int lgConfigK , final int numStdDev ) { } }
return RelativeErrorTables . getRelErr ( upperBound , unioned , lgConfigK , numStdDev ) ;
public class DefaultClusterManager { /** * Clears all items in a queue . */ private void doQueueClear ( final Message < JsonObject > message ) { } }
final String name = message . body ( ) . getString ( "name" ) ; if ( name == null ) { message . reply ( new JsonObject ( ) . putString ( "status" , "error" ) . putString ( "message" , "No name specified." ) ) ; return ; } context . execute ( new Action < Void > ( ) { @ Override public Void perform ( ) { data . getQueue ( formatKey ( name ) ) . clear ( ) ; return null ; } } , new Handler < AsyncResult < Void > > ( ) { @ Override public void handle ( AsyncResult < Void > result ) { if ( result . failed ( ) ) { message . reply ( new JsonObject ( ) . putString ( "status" , "error" ) . putString ( "message" , result . cause ( ) . getMessage ( ) ) ) ; } else { message . reply ( new JsonObject ( ) . putString ( "status" , "ok" ) ) ; } } } ) ;
public class AstaTextFileReader { /** * Retrieve table data , return an empty result set if no table data is present . * @ param name table name * @ return table data */ private List < Row > getTable ( String name ) { } }
List < Row > result = m_tables . get ( name ) ; if ( result == null ) { result = Collections . emptyList ( ) ; } return result ;
public class PortablePositionNavigator { /** * Token with [ number ] quantifier . It means we are navigating in an array cell . */ private static PortablePosition navigateToPathTokenWithNumberQuantifier ( PortableNavigatorContext ctx , PortablePathCursor path ) throws IOException { } }
// makes sure that the field type is an array and parses the qantifier validateArrayType ( ctx . getCurrentClassDefinition ( ) , ctx . getCurrentFieldDefinition ( ) , path . path ( ) ) ; int index = validateAndGetArrayQuantifierFromCurrentToken ( path . token ( ) , path . path ( ) ) ; // reads the array length and checks if the index is in - bound int len = getArrayLengthOfTheField ( ctx ) ; if ( len == 0 ) { return emptyAnyPosition ( path . isLastToken ( ) ) ; } else if ( len == Bits . NULL_ARRAY_LENGTH ) { return nilAnyPosition ( path . isLastToken ( ) ) ; } else if ( index >= len ) { return nilAnyPosition ( path . isLastToken ( ) ) ; } else { // when index in - bound if ( path . isLastToken ( ) ) { // if it ' s a token that ' s on the last position we calculate its direct access position and return it for // reading in the value reader . return createPositionForReadAccess ( ctx , path , index ) ; } else if ( ctx . isCurrentFieldOfType ( FieldType . PORTABLE_ARRAY ) ) { // otherwise we advance only if the type is a portable _ array . We cannot navigate further in a primitive // type and the portable arrays may store portable or primitive types only . navigateContextToNextPortableTokenFromPortableArrayCell ( ctx , path , index ) ; } } return null ;
public class CmsXmlContent { /** * Returns all simple type sub values . < p > * @ param value the value * @ return the simple type sub values */ public List < I_CmsXmlContentValue > getAllSimpleSubValues ( I_CmsXmlContentValue value ) { } }
List < I_CmsXmlContentValue > result = new ArrayList < I_CmsXmlContentValue > ( ) ; for ( I_CmsXmlContentValue subValue : getSubValues ( value . getPath ( ) , value . getLocale ( ) ) ) { if ( subValue . isSimpleType ( ) ) { result . add ( subValue ) ; } else { result . addAll ( getAllSimpleSubValues ( subValue ) ) ; } } return result ;
public class TypeUsage_Builder { /** * Sets the value to be returned by { @ link TypeUsage # type ( ) } . * @ return this { @ code Builder } object * @ throws NullPointerException if { @ code type } is null */ public TypeUsage . Builder type ( QualifiedName type ) { } }
this . type = Objects . requireNonNull ( type ) ; _unsetProperties . remove ( Property . TYPE ) ; return ( TypeUsage . Builder ) this ;
public class MariaDbClob { /** * Return character length of the Clob . Assume UTF8 encoding . */ @ Override public long length ( ) { } }
// The length of a character string is the number of UTF - 16 units ( not the number of characters ) long len = 0 ; int pos = offset ; // set ASCII ( < = 127 chars ) for ( ; len < length && data [ pos ] >= 0 ; ) { len ++ ; pos ++ ; } // multi - bytes UTF - 8 while ( pos < offset + length ) { byte firstByte = data [ pos ++ ] ; if ( firstByte < 0 ) { if ( firstByte >> 5 != - 2 || ( firstByte & 30 ) == 0 ) { if ( firstByte >> 4 == - 2 ) { if ( pos + 1 < offset + length ) { pos += 2 ; len ++ ; } else { throw new UncheckedIOException ( "invalid UTF8" , new CharacterCodingException ( ) ) ; } } else if ( firstByte >> 3 != - 2 ) { throw new UncheckedIOException ( "invalid UTF8" , new CharacterCodingException ( ) ) ; } else if ( pos + 2 < offset + length ) { pos += 3 ; len += 2 ; } else { // bad truncated UTF8 pos += offset + length ; len += 1 ; } } else { pos ++ ; len ++ ; } } else { len ++ ; } } return len ;
public class ImageLoader { /** * Load a rastered image from file * @ param file the file to load * @ return the rastered image * @ throws IOException */ public int [ ] [ ] fromFile ( File file ) throws IOException { } }
BufferedImage image = ImageIO . read ( file ) ; image = scalingIfNeed ( image , true ) ; return toIntArrayArray ( image ) ;
public class ReadablePartialConverter { /** * Extracts the values of the partial from an object of this converter ' s type . * The chrono parameter is a hint to the converter , should it require a * chronology to aid in conversion . * @ param fieldSource a partial that provides access to the fields . * This partial may be incomplete and only getFieldType ( int ) should be used * @ param object the object to convert * @ param chrono the chronology to use , which is the non - null result of getChronology ( ) * @ return the array of field values that match the fieldSource , must be non - null valid * @ throws ClassCastException if the object is invalid */ public int [ ] getPartialValues ( ReadablePartial fieldSource , Object object , Chronology chrono ) { } }
ReadablePartial input = ( ReadablePartial ) object ; int size = fieldSource . size ( ) ; int [ ] values = new int [ size ] ; for ( int i = 0 ; i < size ; i ++ ) { values [ i ] = input . get ( fieldSource . getFieldType ( i ) ) ; } chrono . validate ( fieldSource , values ) ; return values ;
public class Transliterator { /** * Unregisters a transliterator or class . This may be either * a system transliterator or a user transliterator or class . * @ param ID the ID of the transliterator or class * @ see # registerClass */ public static void unregister ( String ID ) { } }
// Drop the cached display name (keyed case-insensitively), then remove the registration.
// NOTE(review): the registry is removed with the raw ID while the cache key is
// case-insensitive — presumably the registry normalizes IDs internally; confirm.
displayNameCache . remove ( new CaseInsensitiveString ( ID ) ) ; registry . remove ( ID ) ;
public class InternalXtypeParser { /** * Delegated rules */ public final boolean synpred4_InternalXtype ( ) { } }
// Generated ANTLR syntactic predicate: speculatively parses the fragment in
// backtracking mode, then rewinds the input and reports whether it matched.
state . backtracking ++ ; int start = input . mark ( ) ; try { synpred4_InternalXtype_fragment ( ) ;
// can never throw exception
} catch ( RecognitionException re ) { System . err . println ( "impossible: " + re ) ; }
// state.failed is set by the fragment on mismatch; reset it so normal parsing resumes.
boolean success = ! state . failed ; input . rewind ( start ) ; state . backtracking -- ; state . failed = false ; return success ;
public class JAXBMarshaller { /** * { @ inheritDoc } */ public void marshal ( Object oValue , OutputStream out ) throws IOException { } }
try { javax . xml . bind . Marshaller marshaller = m_ctx . createMarshaller ( ) ; configureJaxbMarshaller ( marshaller ) ; marshaller . marshal ( oValue , out ) ; } catch ( JAXBException e ) { throw new IOException ( e ) ; }
public class Invariants { /** * A { @ code double } specialized version of { @ link # checkInvariants ( Object , * ContractConditionType [ ] ) } * @ param value The value * @ param conditions The conditions the value must obey * @ return value * @ throws InvariantViolationException If any of the conditions are false */ public static double checkInvariantsD ( final double value , final ContractDoubleConditionType ... conditions ) throws InvariantViolationException { } }
final Violations violations = innerCheckAllDouble ( value , conditions ) ; if ( violations != null ) { throw new InvariantViolationException ( failedMessage ( Double . valueOf ( value ) , violations ) , null , violations . count ( ) ) ; } return value ;
public class Tile { /** * Render the tile image at the specified position in the given * graphics context . */ public void paint ( Graphics2D gfx , int x , int y ) { } }
// Delegate rendering to the tile's mirage at the requested coordinates.
_mirage . paint ( gfx , x , y ) ;
public class ResourceManager { /** * Get a new thread - local instance of the ResourceManager * If you are having problems with bundles beeing the same for different * threads and locales , try forceGet ( ) * @ return the thread - local ResourceManager */ public static ResourceManager get ( ) { } }
ResourceManager resourceManager = ( ResourceManager ) instance . get ( ) ; if ( null == resourceManager ) { resourceManager = new ResourceManager ( ) ; instance . set ( resourceManager ) ; } return resourceManager ;
public class BuilderFactory { /** * Return an instance of the annotation type fields builder for the given * class . * @ return an instance of the annotation type field builder for the given * annotation type . */ public AbstractBuilder getAnnotationTypeFieldsBuilder ( AnnotationTypeWriter annotationTypeWriter ) throws Exception { } }
// Assemble the builder from the shared context, the annotation type's doc,
// and a field writer created for this annotation type writer.
return AnnotationTypeFieldBuilder . getInstance ( context , annotationTypeWriter . getAnnotationTypeDoc ( ) , writerFactory . getAnnotationTypeFieldWriter ( annotationTypeWriter ) ) ;
public class GenericDao { /** * 通过getTableName获取表名 * @ return the tableName */ public String getTableName ( ) { } }
recordLog ( "----" + ThreadContext . getShardKey ( ) + "\t" + getTableName ( orMapping . getTable ( ) , ( Number ) ThreadContext . getShardKey ( ) ) ) ; Number shardKey = ThreadContext . getShardKey ( ) ; return getTableName ( orMapping . getTable ( ) , shardKey ) ;
public class ObjectFactory { /** * Create an instance of { @ link Project . Calendars . Calendar . WorkWeeks . WorkWeek . WeekDays . WeekDay } */ public Project . Calendars . Calendar . WorkWeeks . WorkWeek . WeekDays . WeekDay createProjectCalendarsCalendarWorkWeeksWorkWeekWeekDaysWeekDay ( ) { } }
// Generated JAXB factory method: plain no-arg construction of the nested type.
return new Project . Calendars . Calendar . WorkWeeks . WorkWeek . WeekDays . WeekDay ( ) ;
public class QRCodeWriter { /** * 0 = = black , 255 = = white ( i . e . an 8 bit greyscale bitmap ) . */ private static BitMatrix renderResult ( QRCode code , int width , int height , int quietZone ) { } }
// Scales the QR module matrix up to at least the requested width/height,
// surrounding it with a quiet zone and centering it with white padding.
ByteMatrix input = code . getMatrix ( ) ; if ( input == null ) { throw new IllegalStateException ( ) ; } int inputWidth = input . getWidth ( ) ; int inputHeight = input . getHeight ( ) ; int qrWidth = inputWidth + ( quietZone * 2 ) ; int qrHeight = inputHeight + ( quietZone * 2 ) ; int outputWidth = Math . max ( width , qrWidth ) ; int outputHeight = Math . max ( height , qrHeight ) ;
// Each QR module becomes a `multiple`-pixel square; take the largest integer
// scale that still fits both requested dimensions.
int multiple = Math . min ( outputWidth / qrWidth , outputHeight / qrHeight ) ;
// Padding includes both the quiet zone and the extra white pixels to accommodate the requested
// dimensions . For example , if input is 25x25 the QR will be 33x33 including the quiet zone .
// If the requested size is 200x160 , the multiple will be 4 , for a QR of 132x132 . These will
// handle all the padding from 100x100 ( the actual QR ) up to 200x160.
int leftPadding = ( outputWidth - ( inputWidth * multiple ) ) / 2 ; int topPadding = ( outputHeight - ( inputHeight * multiple ) ) / 2 ; BitMatrix output = new BitMatrix ( outputWidth , outputHeight ) ; for ( int inputY = 0 , outputY = topPadding ; inputY < inputHeight ; inputY ++ , outputY += multiple ) {
// Write the contents of this row of the barcode
for ( int inputX = 0 , outputX = leftPadding ; inputX < inputWidth ; inputX ++ , outputX += multiple ) { if ( input . get ( inputX , inputY ) == 1 ) { output . setRegion ( outputX , outputY , multiple , multiple ) ; } } } return output ;
public class RoundedMoney { /** * ( non - Javadoc ) * @ see javax . money . MonetaryAmount # remainder ( Number ) */ @ Override public RoundedMoney remainder ( Number divisor ) { } }
return new RoundedMoney ( number . remainder ( MoneyUtils . getBigDecimal ( divisor ) , Optional . ofNullable ( monetaryContext . get ( MathContext . class ) ) . orElse ( MathContext . DECIMAL64 ) ) , currency , rounding ) ;
public class CategoryThreadComparator { /** * 按总次数排序 . * @ param o1 * @ param o2 * @ return */ protected int byCurrentSize ( ThreadInfo o1 , ThreadInfo o2 ) { } }
double count = o2 . getCurrentSize ( ) - o1 . getCurrentSize ( ) ; if ( count > 0 ) { return 1 ; } else if ( count == 0 ) { return 0 ; } else { return - 1 ; }
public class IfcConnectedFaceSetImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public EList < IfcFace > getCfsFaces ( ) { } }
// Generated EMF accessor: resolve (true) the CFS_FACES structural feature.
return ( EList < IfcFace > ) eGet ( Ifc4Package . Literals . IFC_CONNECTED_FACE_SET__CFS_FACES , true ) ;
public class WebService { /** * method to generate HELM from a FASTA containing peptide sequence ( s ) * @ param notation * FASTA containing peptide sequence ( s ) * @ return HELM * @ throws FastaFormatException * if the FASTA input is not valid * @ throws MonomerLoadingException * if the MonomerFactory can not be loaded * @ throws ChemistryException * if chemistry engine can not be initialized */ public String generateHELMFromFastaPeptide ( String notation ) throws FastaFormatException , MonomerLoadingException , ChemistryException { } }
// Convert first, then reset the monomer factory: the conversion may depend on
// state the reset clears, so the order of these two statements matters.
String result = FastaFormat . generatePeptidePolymersFromFASTAFormatHELM1 ( notation ) . toHELM2 ( ) ; setMonomerFactoryToDefault ( notation ) ; return result ;
public class SegmentationHelper { /** * Determine if a name is segmented , i . e . if it ends with the correct marker type . * @ param name the name of a packet * @ param marker the marker type ( the initial byte of the component ) * @ return true if the name is segmented */ public static boolean isSegmented ( Name name , byte marker ) { } }
return name . size ( ) > 0 && name . get ( - 1 ) . getValue ( ) . buf ( ) . get ( 0 ) == marker ;
public class GitlabAPI { /** * Delete a project team member . * @ param projectId the project id * @ param userId the user id * @ throws IOException on gitlab api call error */ public void deleteProjectMember ( Integer projectId , Integer userId ) throws IOException { } }
String tailUrl = GitlabProject . URL + "/" + projectId + "/" + GitlabProjectMember . URL + "/" + userId ; retrieve ( ) . method ( DELETE ) . to ( tailUrl , Void . class ) ;
public class CassandraSchemaManager { /** * Check relation and execute query . * @ param embeddableKey * the embeddable key * @ param embeddableToDependentEmbeddables * the embeddable to dependent embeddables * @ param queries * the queries */ private void checkRelationAndExecuteQuery ( String embeddableKey , Map < String , List < String > > embeddableToDependentEmbeddables , Map < String , String > queries ) { } }
// Depth-first: create every dependent embeddable type before creating this one,
// so CQL type dependencies already exist when this query runs.
// NOTE(review): assumes embeddableToDependentEmbeddables contains an entry for
// every key passed in (a missing key would NPE on isEmpty) — confirm with callers.
List < String > dependentEmbeddables = embeddableToDependentEmbeddables . get ( embeddableKey ) ; if ( ! dependentEmbeddables . isEmpty ( ) ) { for ( String dependentEmbeddable : dependentEmbeddables ) { checkRelationAndExecuteQuery ( dependentEmbeddable , embeddableToDependentEmbeddables , queries ) ; } } KunderaCoreUtils . printQuery ( queries . get ( embeddableKey ) , showQuery ) ; try { cassandra_client . execute_cql3_query ( ByteBuffer . wrap ( queries . get ( embeddableKey ) . getBytes ( Constants . CHARSET_UTF8 ) ) , Compression . NONE , ConsistencyLevel . ONE ) ; } catch ( Exception e ) { throw new KunderaException ( "Error while creating type: " + queries . get ( embeddableKey ) , e ) ; }
public class HBaseUtils { /** * From bytes . * @ param m * the m * @ param metaModel * the meta model * @ param b * the b * @ return the object */ public static Object fromBytes ( EntityMetadata m , MetamodelImpl metaModel , byte [ ] b ) { } }
Class idFieldClass = m . getIdAttribute ( ) . getJavaType ( ) ; if ( metaModel . isEmbeddable ( m . getIdAttribute ( ) . getBindableJavaType ( ) ) ) { return fromBytes ( b , String . class ) ; } return fromBytes ( b , idFieldClass ) ;
public class IntegralValueMapping {
    /**
     * Maps each datapoint to the running (cumulative) sum of all values at or
     * before its timestamp, iterating in ascending timestamp order.
     *
     * @param originalDatapoints timestamp-to-value map; not modified
     * @return a new sorted map whose value at each timestamp is the prefix sum
     *         of the original values up to and including that timestamp
     */
    public Map<Long, Double> mapping(Map<Long, Double> originalDatapoints) {
        // TreeMap copy constructor sorts by timestamp in one step.
        Map<Long, Double> sortedDatapoints = new TreeMap<>(originalDatapoints);
        // Primitive accumulator: the original boxed Double accumulator forced
        // an unbox/rebox on every element.
        double runningSum = 0.0;
        for (Entry<Long, Double> entry : sortedDatapoints.entrySet()) {
            runningSum += entry.getValue();
            // setValue updates in place; calling map.put() while iterating the
            // entry set (as before) works only by relying on the key already
            // existing, and is fragile.
            entry.setValue(runningSum);
        }
        return sortedDatapoints;
    }
}
public class GenericStorableCodec { /** * Returns an instance of the codec . The Storable type itself may be an * interface or a class . If it is a class , then it must not be final , and * it must have a public , no - arg constructor . * @ param isMaster when true , version properties and sequences are managed * @ param layout when non - null , encode a storable layout generation * value in one or four bytes . Generation 0 . . 127 is encoded in one byte , and * 128 . . max is encoded in four bytes , with the most significant bit set . * @ param support binds generated storable with a storage layer * @ throws SupportException if Storable is not supported * @ throws amazon . carbonado . MalformedTypeException if Storable type is not well - formed * @ throws IllegalArgumentException if type is null */ @ SuppressWarnings ( "unchecked" ) static synchronized < S extends Storable > GenericStorableCodec < S > getInstance ( GenericStorableCodecFactory factory , GenericEncodingStrategy < S > encodingStrategy , boolean isMaster , Layout layout , RawSupport support ) throws SupportException { } }
// Generated storable classes are cached by (encoding strategy, isMaster, layout);
// the method is synchronized, so the check-then-put on cCache is race-free.
Object layoutKey = layout == null ? null : new LayoutKey ( layout ) ; Object key = KeyFactory . createKey ( new Object [ ] { encodingStrategy , isMaster , layoutKey } ) ; Class < ? extends S > storableImpl = ( Class < ? extends S > ) cCache . get ( key ) ; if ( storableImpl == null ) { storableImpl = generateStorable ( encodingStrategy , isMaster , layout ) ; cCache . put ( key , storableImpl ) ; } return new GenericStorableCodec < S > ( key , factory , encodingStrategy . getType ( ) , storableImpl , encodingStrategy , layout , support ) ;
public class MSPDIReader { /** * Update the project properties from the project summary task . * @ param task project summary task */ private void updateProjectProperties ( Task task ) { } }
// Copy the summary task's notes into the project-level comments field.
ProjectProperties props = m_projectFile . getProjectProperties ( ) ; props . setComments ( task . getNotes ( ) ) ;
public class Matrix { /** * Converts this matrix into the string representation . * @ param formatter the number formatter * @ param rowsDelimiter the rows ' delimiter * @ param columnsDelimiter the columns ' delimiter * @ return the matrix converted to a string */ public String mkString ( NumberFormat formatter , String rowsDelimiter , String columnsDelimiter ) { } }
// Two passes: first measure the widest formatted value per column, then emit
// each value left-padded to that column width so columns line up.
// TODO : rewrite using iterators
int [ ] formats = new int [ columns ] ; for ( int i = 0 ; i < rows ; i ++ ) { for ( int j = 0 ; j < columns ; j ++ ) { double value = get ( i , j ) ; String output = formatter . format ( value ) ; int size = output . length ( ) ; formats [ j ] = size > formats [ j ] ? size : formats [ j ] ; } } StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < rows ; i ++ ) { for ( int j = 0 ; j < columns ; j ++ ) { String output = formatter . format ( get ( i , j ) ) ; int outputLength = output . length ( ) ;
// align >= 1 whenever this branch runs, so INDENTS[align - 1] is in range;
// wider gaps than the precomputed INDENTS table fall back to indent().
if ( outputLength < formats [ j ] ) { int align = formats [ j ] - outputLength ; if ( align > INDENTS . length - 1 ) { indent ( sb , align ) ; } else { sb . append ( INDENTS [ align - 1 ] ) ; } } sb . append ( output ) . append ( j < columns - 1 ? columnsDelimiter : "" ) ; } sb . append ( rowsDelimiter ) ; } return sb . toString ( ) ;
public class VariantAvroToVariantContextConverter { /** * Adjust start / end if a reference base is required due to an empty allele . All variants are checked due to SecAlts . * @ param variant { @ link Variant } object . * @ param study Study * @ return Pair < Integer , Integer > The adjusted ( or same ) start / end position e . g . SV and MNV as SecAlt , INDEL , etc . */ public static Pair < Integer , Integer > adjustedVariantStart ( Variant variant , StudyEntry study , Map < Integer , Character > referenceAlleles ) { } }
// NO_VARIATION needs no adjustment; otherwise adjust for the primary alleles
// first, then widen the same mutable pair for every secondary alternate
// (missing secondary coordinates/alleles fall back to the primary variant's).
if ( variant . getType ( ) . equals ( VariantType . NO_VARIATION ) ) { return new ImmutablePair < > ( variant . getStart ( ) , variant . getEnd ( ) ) ; } MutablePair < Integer , Integer > pos = adjustedVariantStart ( variant . getStart ( ) , variant . getEnd ( ) , variant . getReference ( ) , variant . getAlternate ( ) , referenceAlleles , null ) ; for ( AlternateCoordinate alternateCoordinate : study . getSecondaryAlternates ( ) ) { int alternateStart = alternateCoordinate . getStart ( ) == null ? variant . getStart ( ) : alternateCoordinate . getStart ( ) . intValue ( ) ; int alternateEnd = alternateCoordinate . getEnd ( ) == null ? variant . getEnd ( ) : alternateCoordinate . getEnd ( ) . intValue ( ) ; String reference = alternateCoordinate . getReference ( ) == null ? variant . getReference ( ) : alternateCoordinate . getReference ( ) ; String alternate = alternateCoordinate . getAlternate ( ) == null ? variant . getAlternate ( ) : alternateCoordinate . getAlternate ( ) ; adjustedVariantStart ( alternateStart , alternateEnd , reference , alternate , referenceAlleles , pos ) ; } return pos ;
public class DistributionPointsBuilder { /** * { @ inheritDoc } */ public DistributionPoints buildObject ( String namespaceURI , String localName , String namespacePrefix ) { } }
// Straight factory delegation to the concrete implementation.
return new DistributionPointsImpl ( namespaceURI , localName , namespacePrefix ) ;
public class AdvancedRecyclerArrayAdapter { /** * Adds the specified list of objects at the end of the array . * @ param collection The objects to add at the end of the array . */ public void addAll ( @ NonNull final Collection < T > collection ) { } }
// Early exit for an empty collection so no change notification is fired.
final int length = collection . size ( ) ; if ( length == 0 ) { return ; } synchronized ( mLock ) { final int position = getItemCount ( ) ; mObjects . addAll ( collection ) ;
// NOTE(review): the adapter notification fires while mLock is held — assumed
// to run on the main thread only; confirm callers.
notifyItemRangeInserted ( position , length ) ; }
public class ListAlgorithmsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListAlgorithmsRequest listAlgorithmsRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Generated AWS SDK marshaller: bind each request field to its protocol
// binding; any failure is wrapped in an SdkClientException.
if ( listAlgorithmsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listAlgorithmsRequest . getCreationTimeAfter ( ) , CREATIONTIMEAFTER_BINDING ) ; protocolMarshaller . marshall ( listAlgorithmsRequest . getCreationTimeBefore ( ) , CREATIONTIMEBEFORE_BINDING ) ; protocolMarshaller . marshall ( listAlgorithmsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listAlgorithmsRequest . getNameContains ( ) , NAMECONTAINS_BINDING ) ; protocolMarshaller . marshall ( listAlgorithmsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listAlgorithmsRequest . getSortBy ( ) , SORTBY_BINDING ) ; protocolMarshaller . marshall ( listAlgorithmsRequest . getSortOrder ( ) , SORTORDER_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AmazonKinesisAnalyticsV2Client { /** * Infers a schema for an SQL - based Amazon Kinesis Data Analytics application by evaluating sample records on the * specified streaming source ( Kinesis data stream or Kinesis Data Firehose delivery stream ) or Amazon S3 object . In * the response , the operation returns the inferred schema and also the sample records that the operation used to * infer the schema . * You can use the inferred schema when configuring a streaming source for your application . When you create an * application using the Kinesis Data Analytics console , the console uses this operation to infer a schema and show * it in the console user interface . * @ param discoverInputSchemaRequest * @ return Result of the DiscoverInputSchema operation returned by the service . * @ throws InvalidArgumentException * The specified input parameter value is not valid . * @ throws UnableToDetectSchemaException * The data format is not valid . Amazon Kinesis Data Analytics cannot detect the schema for the given * streaming source . * @ throws ResourceProvisionedThroughputExceededException * Discovery failed to get a record from the streaming source because of the Amazon Kinesis Streams * < code > ProvisionedThroughputExceededException < / code > . For more information , see < a * href = " http : / / docs . aws . amazon . com / kinesis / latest / APIReference / API _ GetRecords . html " > GetRecords < / a > in the * Amazon Kinesis Streams API Reference . * @ throws ServiceUnavailableException * The service cannot complete the request . * @ throws InvalidRequestException * The request JSON is not valid for the operation . * @ sample AmazonKinesisAnalyticsV2 . DiscoverInputSchema * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / kinesisanalyticsv2-2018-05-23 / DiscoverInputSchema " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DiscoverInputSchemaResult discoverInputSchema ( DiscoverInputSchemaRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDiscoverInputSchema ( request ) ;
public class IntIterator { /** * Returns an infinite { @ code IntIterator } . * @ param supplier * @ return */ public static IntIterator generate ( final IntSupplier supplier ) { } }
// Anonymous iterator that never ends: hasNext() is always true and each
// nextInt() pulls a fresh value from the supplier.
N . checkArgNotNull ( supplier ) ; return new IntIterator ( ) { @ Override public boolean hasNext ( ) { return true ; } @ Override public int nextInt ( ) { return supplier . getAsInt ( ) ; } } ;
public class RelatedTablesCoreExtension { /** * Create a user related table if it does not exist . When not created , there * is no guarantee that an existing table has the same schema as the * provided tabled . * @ param relatedTable * user related table * @ return true if created , false if the table already existed * @ since 3.2.0 */ public boolean createRelatedTable ( UserTable < ? extends UserColumn > relatedTable ) { } }
// Creates the table then its Contents metadata row; on any failure the
// freshly created table is rolled back (deleted) before rethrowing.
boolean created = false ; String relatedTableName = relatedTable . getTableName ( ) ; if ( ! geoPackage . isTable ( relatedTableName ) ) { geoPackage . createUserTable ( relatedTable ) ; try {
// Create the contents
Contents contents = new Contents ( ) ; contents . setTableName ( relatedTableName ) ; contents . setDataTypeString ( relatedTable . getDataType ( ) ) ; contents . setIdentifier ( relatedTableName ) ; ContentsDao contentsDao = geoPackage . getContentsDao ( ) ; contentsDao . create ( contents ) ; contentsDao . refresh ( contents ) ; relatedTable . setContents ( contents ) ; } catch ( RuntimeException e ) { geoPackage . deleteTableQuietly ( relatedTableName ) ; throw e ; } catch ( SQLException e ) { geoPackage . deleteTableQuietly ( relatedTableName ) ; throw new GeoPackageException ( "Failed to create table and metadata: " + relatedTableName , e ) ; } created = true ; } return created ;
public class BizwifiAPI { /**
 * Wi-Fi mini program — jump to a mini program from the "Wi-Fi connected" finish page.
 * Scenario:
 * Configure the mini program to open; when the user taps the "Finish" button
 * after connecting, the configured mini program is opened.
 * Note: only mini programs linked to this official account can be targeted.
 * @ param accessToken accessToken
 * @ param finishPageSet finishPageSet
 * @ return BaseResult
 */ public static BaseResult finishpageSet ( String accessToken , FinishPageSet finishPageSet ) { } }
// Serialize the settings to JSON and delegate to the String-based overload.
return finishpageSet ( accessToken , JsonUtil . toJSONString ( finishPageSet ) ) ;
public class RBBINode { /** * / CLOVER : OFF */ static void printInt ( int i , int minWidth ) { } }
String s = Integer . toString ( i ) ; printString ( s , Math . max ( minWidth , s . length ( ) + 1 ) ) ;
public class MapIterate { /** * Get and return the value in the Map at the specified key , or if there is no value at the key , return the result * of evaluating the specified { @ link Function0 } , and put that value in the map at the specified key . * This method handles the { @ code null } - value - at - key case correctly . */ public static < K , V > V getIfAbsentPut ( Map < K , V > map , K key , Function0 < ? extends V > instanceBlock ) { } }
// MutableMap has a native implementation; otherwise emulate it. isAbsent()
// distinguishes "key maps to null" from "key not present" before computing.
if ( map instanceof MutableMap ) { return ( ( MutableMap < K , V > ) map ) . getIfAbsentPut ( key , instanceBlock ) ; } V result = map . get ( key ) ; if ( MapIterate . isAbsent ( result , map , key ) ) { result = instanceBlock . value ( ) ; map . put ( key , result ) ; } return result ;
public class TransactedReturnGeneratedKeysBuilder { /** * Transforms the results using the given function . * @ param mapper * maps the query results to an object * @ return the results of the query as an Observable */ @ Override public < T > Flowable < Tx < T > > get ( @ Nonnull ResultSetMapper < ? extends T > mapper ) { } }
// Defers subscription so each subscriber gets its own transacted connection.
// The connection is captured in the AtomicReference when the update runs, and
// the transaction is committed once the terminal Tx notification is observed.
Preconditions . checkNotNull ( mapper , "mapper cannot be null" ) ; return Flowable . defer ( ( ) -> { AtomicReference < Connection > connection = new AtomicReference < Connection > ( ) ; Flowable < T > o = Update . < T > createReturnGeneratedKeys ( update . updateBuilder . connections . map ( c -> Util . toTransactedConnection ( connection , c ) ) , update . parameterGroupsToFlowable ( ) , update . updateBuilder . sql , mapper , false ) ; return o . materialize ( ) . flatMap ( n -> Tx . toTx ( n , connection . get ( ) , db ) ) . doOnNext ( tx -> { if ( tx . isComplete ( ) ) { ( ( TxImpl < T > ) tx ) . connection ( ) . commit ( ) ; } } ) ; } ) ;
public class StreamingPoliciesInner { /** * List Streaming Policies . * Lists the Streaming Policies in the account . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; StreamingPolicyInner & gt ; object */ public Observable < Page < StreamingPolicyInner > > listNextAsync ( final String nextPageLink ) { } }
// Unwrap the ServiceResponse envelope and emit only the page body.
return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < StreamingPolicyInner > > , Page < StreamingPolicyInner > > ( ) { @ Override public Page < StreamingPolicyInner > call ( ServiceResponse < Page < StreamingPolicyInner > > response ) { return response . body ( ) ; } } ) ;
public class LstnDbChanged { /** * < p > Make something with a model . < / p > * @ param pReqVars additional request scoped parameters * @ throws Exception - an exception * @ param pFactoryAppBeans with make */ @ Override public final void make ( final Map < String , Object > pReqVars ) throws Exception { } }
// After a database change, refresh the servlet-context singletons that were
// built against the old database: the i18n service and the session tracker.
@ SuppressWarnings ( "unchecked" ) AFactoryAppBeans < RS > factoryAppBeans = ( AFactoryAppBeans < RS > ) this . factoryAndServlet . getFactoryAppBeans ( ) ; this . factoryAndServlet . getHttpServlet ( ) . getServletContext ( ) . setAttribute ( "srvI18n" , factoryAppBeans . lazyGet ( "ISrvI18n" ) ) ; this . factoryAndServlet . getHttpServlet ( ) . getServletContext ( ) . setAttribute ( "sessionTracker" , factoryAppBeans . lazyGet ( "ISessionTracker" ) ) ;
public class TypeCheckUtil { /** * Is obj class boolean . * @ param type the type * @ return the boolean */ public static boolean isObjClass ( Class < ? > type ) { } }
if ( type . isPrimitive ( ) || type . isEnum ( ) || type . isArray ( ) ) { return false ; } String block = BASIC_PACKAGE_PREFIX_LIST . stream ( ) . filter ( prefix -> type . getName ( ) . startsWith ( prefix ) ) . findAny ( ) . orElse ( null ) ; if ( block != null ) { return false ; } return ! PRIMITIVE_CLASS_LIST . contains ( type ) ;
public class FileSystemShellUtils { /** * Validates the path , verifying that it contains the { @ link Constants # HEADER } or * { @ link Constants # HEADER _ FT } and a hostname : port specified . * @ param path the path to be verified * @ param alluxioConf Alluxio configuration * @ return the verified path in a form like alluxio : / / host : port / dir . If only the " / dir " or " dir " * part is provided , the host and port are retrieved from property , * alluxio . master . hostname and alluxio . master . port , respectively . */ public static String validatePath ( String path , AlluxioConfiguration alluxioConf ) throws IOException { } }
// Fully-qualified paths must carry host:port; bare paths are qualified using
// the configured master address, choosing the fault-tolerant scheme when
// ZooKeeper is enabled.
if ( path . startsWith ( Constants . HEADER ) || path . startsWith ( Constants . HEADER_FT ) ) { if ( ! path . contains ( ":" ) ) { throw new IOException ( "Invalid Path: " + path + ". Use " + Constants . HEADER + "host:port/ ," + Constants . HEADER_FT + "host:port/" + " , or /file" ) ; } else { return path ; } } else { String hostname = NetworkAddressUtils . getConnectHost ( ServiceType . MASTER_RPC , alluxioConf ) ; int port = alluxioConf . getInt ( PropertyKey . MASTER_RPC_PORT ) ; if ( alluxioConf . getBoolean ( PropertyKey . ZOOKEEPER_ENABLED ) ) { return PathUtils . concatPath ( Constants . HEADER_FT + hostname + ":" + port , path ) ; } return PathUtils . concatPath ( Constants . HEADER + hostname + ":" + port , path ) ; }
public class GVREventManager { /** * Return the method in eventsClass by checking the signature . * RuntimeException is thrown if the event is not found in the eventsClass interface , * or the parameter types don ' t match . */ private Method findHandlerMethod ( Object target , Class < ? extends IEvents > eventsClass , String eventName , Object [ ] params ) { } }
// Use cached method if available . Note : no further type checking is done if the
// method has been cached . It will be checked by JRE when the method is invoked .
Method cachedMethod = getCachedMethod ( target , eventName ) ; if ( cachedMethod != null ) { return cachedMethod ; }
// Check the event and params against the eventsClass interface object .
// nameMatch tracks any same-named method (for a better error message);
// signatureMatch requires parameter count and (autoboxed) types to match too.
Method nameMatch = null ; Method signatureMatch = null ; for ( Method method : eventsClass . getMethods ( ) ) {
// Match method name and event name
if ( method . getName ( ) . equals ( eventName ) ) { nameMatch = method ;
// Check number of parameters
Class < ? > [ ] types = method . getParameterTypes ( ) ; if ( types . length != params . length ) continue ;
// Check parameter types
int i = 0 ; boolean foundMatchedMethod = true ; for ( Class < ? > type : types ) { Object param = params [ i ++ ] ; if ( ! isInstanceWithAutoboxing ( type , param ) ) { foundMatchedMethod = false ; break ; } } if ( foundMatchedMethod ) { signatureMatch = method ; break ; } } }
// Error
if ( nameMatch == null ) { throw new RuntimeException ( String . format ( "The interface contains no method %s" , eventName ) ) ; } else if ( signatureMatch == null ) { throw new RuntimeException ( String . format ( "The interface contains a method %s but " + "parameters don't match" , eventName ) ) ; }
// Cache the method for the target , even if it doesn ' t implement the interface . This is
// to avoid always verifying the event .
addCachedMethod ( target , eventName , signatureMatch ) ; return signatureMatch ;
public class ShapePath { /** * Adds a { @ link ShadowCompatOperation } , adding an { @ link ArcShadowOperation } if needed in order * to connect the previous shadow end to the new shadow operation ' s beginning . */ private void addShadowCompatOperation ( ShadowCompatOperation shadowOperation , float startShadowAngle , float endShadowAngle ) { } }
// Bridge any angular gap from the previous shadow's end with a connecting arc,
// record the new operation, then advance the current angle to its end.
addConnectingShadowIfNecessary ( startShadowAngle ) ; shadowCompatOperations . add ( shadowOperation ) ; currentShadowAngle = endShadowAngle ;
public class AWSIotClient { /** * Deprecates a thing type . You can not associate new things with deprecated thing type . * @ param deprecateThingTypeRequest * The input for the DeprecateThingType operation . * @ return Result of the DeprecateThingType operation returned by the service . * @ throws ResourceNotFoundException * The specified resource does not exist . * @ throws InvalidRequestException * The request is not valid . * @ throws ThrottlingException * The rate exceeds the limit . * @ throws UnauthorizedException * You are not authorized to perform this operation . * @ throws ServiceUnavailableException * The service is temporarily unavailable . * @ throws InternalFailureException * An unexpected error has occurred . * @ sample AWSIot . DeprecateThingType */ @ Override public DeprecateThingTypeResult deprecateThingType ( DeprecateThingTypeRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeprecateThingType ( request ) ;
public class ArtifactDetailsLayout { /** * Set title of artifact details header layout . */ private void setTitleOfLayoutHeader ( ) { } }
// The i18n-built title contains markup, so render it with HTML content mode.
titleOfArtifactDetails . setValue ( HawkbitCommonUtil . getArtifactoryDetailsLabelId ( "" , i18n ) ) ; titleOfArtifactDetails . setContentMode ( ContentMode . HTML ) ;
public class MBeanServerHandler { /** * Unregister all previously registered MBean . This is tried for all previously * registered MBeans * @ throws JMException if an exception occurs during unregistration */ public final void destroy ( ) throws JMException { } }
// Attempts every unregistration, collecting exceptions so one failure does
// not prevent the rest; a single failure is rethrown as-is, multiple are
// merged into one JMException with the concatenated messages.
synchronized ( mBeanHandles ) { List < JMException > exceptions = new ArrayList < JMException > ( ) ; List < MBeanHandle > unregistered = new ArrayList < MBeanHandle > ( ) ; for ( MBeanHandle handle : mBeanHandles ) { try {
// NOTE(review): the handle is marked unregistered before the call, so a
// handle whose unregisterMBean throws is still dropped and never retried —
// presumably intentional (treat as gone); confirm.
unregistered . add ( handle ) ; handle . server . unregisterMBean ( handle . objectName ) ; } catch ( InstanceNotFoundException e ) { exceptions . add ( e ) ; } catch ( MBeanRegistrationException e ) { exceptions . add ( e ) ; } }
// Remove all successfully unregistered handles
mBeanHandles . removeAll ( unregistered ) ;
// Throw error if any exception occurred during unregistration
if ( exceptions . size ( ) == 1 ) { throw exceptions . get ( 0 ) ; } else if ( exceptions . size ( ) > 1 ) { StringBuilder ret = new StringBuilder ( ) ; for ( JMException e : exceptions ) { ret . append ( e . getMessage ( ) ) . append ( ", " ) ; } throw new JMException ( ret . substring ( 0 , ret . length ( ) - 2 ) ) ; } }
// Unregister any notification listener
mBeanServerManager . destroy ( ) ;
public class Vector3Axis { /** * Checks if vector is { @ link Float # isInfinite } or not . * @ return true if all dimensions are { @ link Float # isInfinite } , otherwise - false */ public boolean isInfinite ( ) { } }
return Float . isInfinite ( x ) && Float . isInfinite ( y ) && Float . isInfinite ( z ) ;
public class SavedQueriesPanel { /** * End of variables declaration / / GEN - END : variables */ public void fireQueryChanged ( String newgroup , String newquery , String newid ) { } }
for ( SavedQueriesPanelListener listener : listeners ) { listener . selectedQueryChanged ( newgroup , newquery , newid ) ; }
public class Cell {
    /**
     * Reset every layout constraint of this cell to its default value, so that
     * stale per-cell settings do not leak into a subsequent layout pass.
     */
    void defaults() {
        // Size constraints: resolved lazily through the layout's toolkit.
        minWidth = new MinWidthValue<C, T>(layout.toolkit);
        minHeight = new MinHeightValue<C, T>(layout.toolkit);
        prefWidth = new PrefWidthValue<C, T>(layout.toolkit);
        prefHeight = new PrefHeightValue<C, T>(layout.toolkit);
        maxWidth = new MaxWidthValue<C, T>(layout.toolkit);
        maxHeight = new MaxHeightValue<C, T>(layout.toolkit);
        // Spacing outside the widget: zero on all four sides.
        spaceTop = new FixedValue<C, T>(layout.toolkit, 0);
        spaceLeft = new FixedValue<C, T>(layout.toolkit, 0);
        spaceBottom = new FixedValue<C, T>(layout.toolkit, 0);
        spaceRight = new FixedValue<C, T>(layout.toolkit, 0);
        // Padding inside the cell: zero on all four sides.
        padTop = new FixedValue<C, T>(layout.toolkit, 0);
        padLeft = new FixedValue<C, T>(layout.toolkit, 0);
        padBottom = new FixedValue<C, T>(layout.toolkit, 0);
        padRight = new FixedValue<C, T>(layout.toolkit, 0);
        // Alignment / grow behaviour: centered, no fill, no expand.
        fillX = 0f;
        fillY = 0f;
        align = CENTER;
        expandX = 0;
        expandY = 0;
        // Misc flags: participate in layout, occupy one column, no uniform sizing.
        ignore = false;
        colspan = 1;
        uniformX = null;
        uniformY = null;
    }
}
public class ExtensionManager { /** * Notifies the extensions that the kernel is stopped */ public void stopped ( ) { } }
for ( KernelExtension kernelExtension : kernelExtensions . keySet ( ) ) { kernelExtension . stopped ( kernelExtensions . get ( kernelExtension ) ) ; }
public class MethodDelegation { /** * { @ inheritDoc } */ public ByteCodeAppender appender ( Target implementationTarget ) { } }
ImplementationDelegate . Compiled compiled = implementationDelegate . compile ( implementationTarget . getInstrumentedType ( ) ) ; return new Appender ( implementationTarget , new MethodDelegationBinder . Processor ( compiled . getRecords ( ) , ambiguityResolver , bindingResolver ) , terminationHandler , assigner , compiled ) ;
public class X500Name { /** * Return an immutable List of the the AVAs contained in all the * RDNs of this X500Name . */ public List < AVA > allAvas ( ) { } }
List < AVA > list = allAvaList ; if ( list == null ) { list = new ArrayList < AVA > ( ) ; for ( int i = 0 ; i < names . length ; i ++ ) { list . addAll ( names [ i ] . avas ( ) ) ; } } return list ;
public class SessionIdGenerator {
    /**
     * Create a new random number generator instance to use for generating session
     * identifiers. Tries, in order: the configured {@code secureRandomClass}, then
     * {@code SecureRandom} with the configured provider/algorithm, then the
     * "SHA1PRNG" algorithm, and finally the platform-default {@code SecureRandom}.
     * The instance is seeded before being returned, and slow creation (>100 ms)
     * is logged.
     *
     * @return a seeded SecureRandom instance; never null
     */
    private SecureRandom createSecureRandom() {
        SecureRandom result = null;
        long t1 = System.currentTimeMillis();
        if (secureRandomClass != null) {
            try {
                // Construct and seed a new random number generator of the configured class.
                Class<?> clazz = Class.forName(secureRandomClass);
                result = (SecureRandom) clazz.newInstance();
            } catch (Exception e) {
                log.log(Level.SEVERE, "Exception initializing random number generator of class " + secureRandomClass + ". Falling back to java.secure.SecureRandom", e);
            }
        }
        if (result == null) {
            // No secureRandomClass or creation failed. Use SecureRandom with the
            // configured provider and/or algorithm, when present.
            try {
                if (secureRandomProvider != null && secureRandomProvider.length() > 0) {
                    result = SecureRandom.getInstance(secureRandomAlgorithm, secureRandomProvider);
                } else {
                    if (secureRandomAlgorithm != null && secureRandomAlgorithm.length() > 0) {
                        result = SecureRandom.getInstance(secureRandomAlgorithm);
                    }
                }
            } catch (NoSuchAlgorithmException e) {
                log.log(Level.SEVERE, "Exception initializing random number generator using algorithm: " + secureRandomAlgorithm, e);
            } catch (NoSuchProviderException e) {
                log.log(Level.SEVERE, "Exception initializing random number generator using provider: " + secureRandomProvider, e);
            }
        }
        if (result == null) {
            // Invalid provider / algorithm -- fall back to the widely available SHA1PRNG.
            try {
                result = SecureRandom.getInstance("SHA1PRNG");
            } catch (NoSuchAlgorithmException e) {
                log.log(Level.SEVERE, "Invalid provider / algoritm SHA1PRNG for generate secure random token", e);
            }
        }
        if (result == null) {
            // Nothing works - use platform default.
            result = new SecureRandom();
        }
        // Force seeding to take place now rather than on first use.
        result.nextInt();
        long t2 = System.currentTimeMillis();
        if ((t2 - t1) > 100) {
            // NOTE(review): the guard checks FINEST but then logs at INFO level --
            // looks inconsistent; confirm which level is intended.
            if (log.isLoggable(Level.FINEST)) {
                log.info("Creation of SecureRandom instance for session ID generation using [" + result.getAlgorithm() + "] took [" + Long.valueOf(t2 - t1) + "] milliseconds.");
            }
        }
        return result;
    }
}
public class MapModel { /** * Deselect the currently selected layer , includes sending the deselect events . * @ param layer * layer to clear */ private void deselectLayer ( Layer < ? > layer ) { } }
if ( layer != null ) { layer . setSelected ( false ) ; handlerManager . fireEvent ( new LayerDeselectedEvent ( layer ) ) ; }
public class AbstractDAO { /** * Get the results of a query . * @ param query the query to run * @ return the list of matched query results * @ see Query # list ( ) */ protected List < E > list ( Query < E > query ) throws HibernateException { } }
return requireNonNull ( query ) . list ( ) ;
public class LexTokenReader {
    /**
     * Read a fully qualified, backtick-separated module name.
     *
     * @return a list of one or two name parts. A qualified {@code mk_} or
     *         {@code is_} constructor (e.g. {@code mk_Mod`name}) is collapsed back
     *         into a single element containing the backtick.
     */
    private List<String> rdName() {
        List<String> names = new Vector<String>();
        names.add(rdIdentifier());
        if (ch == '`') {
            // Consume the backtick; only treat it as a qualifier when an
            // identifier actually follows it.
            if (startOfName(rdCh())) {
                names.add(rdIdentifier());
            }
        }
        if (names.size() == 2) {
            // We have the strange mk_Mod`name case: mk_/is_ prefixed names are a
            // single constructor/test identifier, not a module-qualified name,
            // so re-join the two parts into one element.
            String first = names.get(0);
            if (first.startsWith("mk_") || first.startsWith("is_")) {
                List<String> one = new Vector<String>();
                one.add(first + "`" + names.get(1));
                names = one;
            }
        }
        return names;
    }
}
public class SecurityContextBuilder { /** * Builds SslContext using protected keystore and truststores . Adequate for mutual TLS connections . * @ param keystorePath Path for keystore file * @ param keystorePassword Password for protected keystore file * @ param truststorePath Path for truststore file * @ param truststorePassword Password for protected truststore file * @ param keyManagerAlgorithm Algorithm for keyManager used to process keystorefile * @ return SslContext ready to use * @ throws SecurityContextException */ public static SslContext forKeystoreAndTruststore ( String keystorePath , String keystorePassword , String truststorePath , String truststorePassword , String keyManagerAlgorithm ) throws SecurityContextException { } }
try { return forKeystoreAndTruststore ( new FileInputStream ( keystorePath ) , keystorePassword , new FileInputStream ( truststorePath ) , truststorePassword , keyManagerAlgorithm ) ; } catch ( Exception e ) { throw new SecurityContextException ( e ) ; }
public class Jenkins {
    /**
     * Parses a version string into {@link VersionNumber}, or null if it's not
     * parseable as a version number (such as when Jenkins is run with
     * "mvn hudson-dev:run").
     *
     * @param versionString the raw version string, may be null
     * @return the parsed version, or null when the input is null or unparseable
     */
    private static @CheckForNull VersionNumber toVersion(@CheckForNull String versionString) {
        if (versionString == null) {
            return null;
        }
        try {
            return new VersionNumber(versionString);
        } catch (NumberFormatException e) {
            try {
                // For a non-released version of Jenkins this looks like
                // "1.345 (private-foobar)", so approximate by parsing only the part
                // before the first space.
                int idx = versionString.indexOf(' ');
                if (idx > 0) {
                    return new VersionNumber(versionString.substring(0, idx));
                }
            } catch (NumberFormatException ignored) {
                // fall through
            }
            // totally unparseable
            return null;
        } catch (IllegalArgumentException e) {
            // totally unparseable
            return null;
        }
    }
}
public class MainController { /** * Refreshes the iframe content . */ @ Override public void refresh ( ) { } }
String url = mockupTypes . getUrl ( mockupType ) ; if ( mockupId == null || url == null ) { iframe . setSrc ( null ) ; return ; } iframe . setSrc ( String . format ( url , mockupId , System . currentTimeMillis ( ) ) ) ;
public class Properties { /** * Returns the string property associated with { @ code propName } , or { @ code * defaultValue } if there is no property . */ public String getProperty ( String propName , String defaultValue ) { } }
return props . getProperty ( propName , defaultValue ) ;
public class DirectLogFetcher {
    /**
     * Connect to a MySQL master to fetch the binlog. Unwraps the JDBC connection
     * via reflection to get at the driver's raw network streams, then issues a
     * COM_BINLOG_DUMP command. On any failure the fetcher is closed before the
     * exception is propagated.
     *
     * @param conn         an open MySQL JDBC connection (Connector/J)
     * @param fileName     binlog file name to start from
     * @param filePosition byte offset within the binlog file; 0 means "start after
     *                     the binlog header"
     * @param serverId     slave server id to present to the master
     * @param nonBlocking  whether to request a non-blocking dump
     * @throws IOException if the connection cannot be unwrapped or the dump
     *                     command fails
     */
    public void open(Connection conn, String fileName, long filePosition, final int serverId, boolean nonBlocking) throws IOException {
        try {
            this.conn = conn;
            // Unwrap any pooling/proxy layers down to the Connector/J implementation.
            Class<?> connClazz = Class.forName("com.mysql.jdbc.ConnectionImpl");
            Object unwrapConn = unwrapConnection(conn, connClazz);
            if (unwrapConn == null) {
                throw new IOException("Unable to unwrap " + conn.getClass().getName() + " to com.mysql.jdbc.ConnectionImpl");
            }
            // Get underlying IO streams for network communications via the
            // driver's private "io" field.
            Object connIo = getDeclaredField(unwrapConn, connClazz, "io");
            if (connIo == null) {
                throw new IOException("Get null field:" + conn.getClass().getName() + "#io");
            }
            mysqlOutput = (OutputStream) getDeclaredField(connIo, connIo.getClass(), "mysqlOutput");
            mysqlInput = (InputStream) getDeclaredField(connIo, connIo.getClass(), "mysqlInput");
            // Position 0 is not a valid start -- skip past the binlog file header.
            if (filePosition == 0) filePosition = BIN_LOG_HEADER_SIZE;
            sendBinlogDump(fileName, filePosition, serverId, nonBlocking);
            position = 0;
        } catch (IOException e) {
            close(); /* Do cleanup */
            logger.error("Error on COM_BINLOG_DUMP: file = " + fileName + ", position = " + filePosition);
            throw e;
        } catch (ClassNotFoundException e) {
            close(); /* Do cleanup */
            throw new IOException("Unable to load com.mysql.jdbc.ConnectionImpl", e);
        }
    }
}
public class StyleHelper {
    /**
     * Makes the given ui object visible on the given device size(s) by adding
     * the matching Bootstrap responsiveness style names.
     *
     * @param uiObject   object to be made visible on the device size
     * @param deviceSize device size (possibly a combined value such as SM_MD)
     */
    public static void setVisibleOn(final UIObject uiObject, final DeviceSize deviceSize) {
        // A combined enum like SM_MD splits on '_' into the individual devices
        // [SM, MD], each of which maps to one visibility style.
        for (final String token : deviceSize.name().split("_")) {
            // Cast back to a basic enum constant (PRINT, XS, SM, MD, LG).
            switch (DeviceSize.valueOf(token)) {
                case PRINT:
                    addEnumStyleName(uiObject, Responsiveness.VISIBLE_PRINT);
                    break;
                case XS:
                    addEnumStyleName(uiObject, Responsiveness.VISIBLE_XS);
                    break;
                case SM:
                    addEnumStyleName(uiObject, Responsiveness.VISIBLE_SM);
                    break;
                case MD:
                    addEnumStyleName(uiObject, Responsiveness.VISIBLE_MD);
                    break;
                case LG:
                    addEnumStyleName(uiObject, Responsiveness.VISIBLE_LG);
                    break;
                default:
                    break;
            }
        }
    }
}
public class ApiOvhMe { /** * Create a default IP restriction for your future VoIP lines * REST : POST / me / telephony / defaultIpRestriction * @ param subnet [ required ] The IPv4 subnet you want to allow * @ param type [ required ] The protocol you want to restrict ( sip / mgcp ) */ public OvhDefaultIpRestriction telephony_defaultIpRestriction_POST ( String subnet , OvhProtocolEnum type ) throws IOException { } }
String qPath = "/me/telephony/defaultIpRestriction" ; StringBuilder sb = path ( qPath ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "subnet" , subnet ) ; addBody ( o , "type" , type ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhDefaultIpRestriction . class ) ;
public class JingleSession {
    /**
     * Dispatch an incoming packet. The method is responsible for recognizing the
     * stanza type and, depending on the current state, delivering the stanza to
     * the right event handler and wait for a response.
     *
     * @param iq the stanza received, may be null (in which case no response is produced)
     * @param id unused here; part of the dispatch interface
     * @return the new Jingle stanzas to send (zero or one element)
     * @throws XMPPException
     * @throws SmackException
     * @throws InterruptedException
     */
    @Override
    public List<IQ> dispatchIncomingPacket(IQ iq, String id) throws XMPPException, SmackException, InterruptedException {
        List<IQ> responses = new ArrayList<>();
        IQ response = null;
        if (iq != null) {
            if (iq.getType().equals(IQ.Type.error)) {
                // Process errors
                // TODO getState().eventError(iq)
            } else if (iq.getType().equals(IQ.Type.result)) {
                // Process ACKs: only stanza ids we are waiting on are relevant.
                if (isExpectedId(iq.getStanzaId())) {
                    // The other side provisionally accepted our session-initiate.
                    // Kick off some negotiators.
                    if (iq.getStanzaId().equals(sessionInitPacketID)) {
                        startNegotiators();
                    }
                    removeExpectedId(iq.getStanzaId());
                }
            } else if (iq instanceof Jingle) {
                // It is not an error: it is a Jingle packet...
                Jingle jin = (Jingle) iq;
                JingleActionEnum action = jin.getAction();
                // Depending on the state we're in we'll get different processing actions.
                // (See Design Patterns AKA GoF State behavioral pattern.)
                response = getSessionState().processJingle(this, jin, action);
            }
        }
        if (response != null) {
            // Save the packet id, for recognizing ACKs...
            addExpectedId(response.getStanzaId());
            responses.add(response);
        }
        return responses;
    }
}
public class Graylog2Module { /** * See comments in MessageOutput . Factory and MessageOutput . Factory2 for details */ protected MapBinder < String , MessageOutput . Factory2 < ? extends MessageOutput > > outputsMapBinder2 ( ) { } }
return MapBinder . newMapBinder ( binder ( ) , TypeLiteral . get ( String . class ) , new TypeLiteral < MessageOutput . Factory2 < ? extends MessageOutput > > ( ) { } ) ;
public class CloudTasksClient { /** * Creates a task and adds it to a queue . * < p > Tasks cannot be updated after creation ; there is no UpdateTask command . * < p > & # 42 ; For [ App Engine queues ] [ google . cloud . tasks . v2 . AppEngineHttpQueue ] , the maximum task * size is 100KB . * < p > Sample code : * < pre > < code > * try ( CloudTasksClient cloudTasksClient = CloudTasksClient . create ( ) ) { * QueueName parent = QueueName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ QUEUE ] " ) ; * Task task = Task . newBuilder ( ) . build ( ) ; * Task response = cloudTasksClient . createTask ( parent , task ) ; * < / code > < / pre > * @ param parent Required . * < p > The queue name . For example : ` projects / PROJECT _ ID / locations / LOCATION _ ID / queues / QUEUE _ ID ` * < p > The queue must already exist . * @ param task Required . * < p > The task to add . * < p > Task names have the following format : * ` projects / PROJECT _ ID / locations / LOCATION _ ID / queues / QUEUE _ ID / tasks / TASK _ ID ` . The user can * optionally specify a task [ name ] [ google . cloud . tasks . v2 . Task . name ] . If a name is not * specified then the system will generate a random unique task id , which will be set in the * task returned in the [ response ] [ google . cloud . tasks . v2 . Task . name ] . * < p > If [ schedule _ time ] [ google . cloud . tasks . v2 . Task . schedule _ time ] is not set or is in the * past then Cloud Tasks will set it to the current time . * < p > Task De - duplication : * < p > Explicitly specifying a task ID enables task de - duplication . If a task ' s ID is identical * to that of an existing task or a task that was deleted or executed recently then the call * will fail with [ ALREADY _ EXISTS ] [ google . rpc . Code . ALREADY _ EXISTS ] . If the task ' s queue was * created using Cloud Tasks , then another task with the same name can ' t be created for ~ 1hour * after the original task was deleted or executed . 
If the task ' s queue was created using * queue . yaml or queue . xml , then another task with the same name can ' t be created for ~ 9days * after the original task was deleted or executed . * < p > Because there is an extra lookup cost to identify duplicate task names , these * [ CreateTask ] [ google . cloud . tasks . v2 . CloudTasks . CreateTask ] calls have significantly * increased latency . Using hashed strings for the task id or for the prefix of the task id is * recommended . Choosing task ids that are sequential or have sequential prefixes , for example * using a timestamp , causes an increase in latency and error rates in all task commands . The * infrastructure relies on an approximately uniform distribution of task ids to store and * serve tasks efficiently . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Task createTask ( QueueName parent , Task task ) { } }
CreateTaskRequest request = CreateTaskRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . setTask ( task ) . build ( ) ; return createTask ( request ) ;
public class DeviceDAO {
    /**
     * Loads device details from internal storage (the shared preferences file).
     *
     * @return device populated from stored values; string fields default to null
     *         and the app version to -1 when absent
     */
    private Device loadDevice() {
        // Initialise the object with the content saved in the shared pref file.
        final SharedPreferences prefs = getSharedPreferences();
        final Device device = new Device();
        device.setApiSpaceId(prefs.getString(KEY_API_SPACE_ID, null));
        device.setAppVer(prefs.getInt(KEY_APP_VER, -1));
        device.setInstanceId(prefs.getString(KEY_INSTANCE_ID, null));
        device.setPushToken(prefs.getString(KEY_PUSH_TOKEN, null));
        device.setDeviceId(prefs.getString(KEY_DEVICE_ID, null));
        return device;
    }
}
public class FutureCollectionCompletionListener { /** * Caller is responsible for ensuring that the futureConditions collection is immutable . */ public static void newFutureCollectionCompletionListener ( Collection < ApplicationDependency > futureConditions , CompletionListener < Boolean > newCL ) { } }
if ( futureConditions . isEmpty ( ) ) { newCL . successfulCompletion ( null , true ) ; } else { FutureCollectionCompletionListener futureListener = new FutureCollectionCompletionListener ( futureConditions . size ( ) , newCL ) ; futureListener . onCompletion ( futureConditions ) ; }
public class Ix {
    /**
     * Combines the next element from this and the other source Iterable via a
     * zipper function. If one of the source Iterables is shorter, the sequence
     * terminates eagerly. The result's iterator() doesn't support remove().
     *
     * @param <U> the other source's element type
     * @param <R> the result value type
     * @param other the other source Iterable, not null
     * @param zipper the function that takes one element from each source, not null
     * @return the new Ix instance
     * @throws NullPointerException if other or zipper is null
     * @since 1.0
     */
    public final <U, R> Ix<R> zipWith(Iterable<U> other, IxFunction2<? super T, ? super U, ? extends R> zipper) {
        return zip(this, other, zipper);
    }
}
public class BandLU {
    /**
     * Computes the reciprocal condition number, using either the infinity norm
     * or the 1 norm, via LAPACK's {@code dgbcon} on the banded LU factors.
     *
     * @param A    The matrix this is a decomposition of
     * @param norm Either <code>Norm.One</code> or <code>Norm.Infinity</code>
     * @return The reciprocal condition number. Values close to unity indicate a
     *         well-conditioned system, while numbers close to zero do not.
     */
    public double rcond(Matrix A, Norm norm) {
        // Validate the requested norm and that A matches this factorization.
        if (norm != Norm.One && norm != Norm.Infinity)
            throw new IllegalArgumentException("Only the 1 or the Infinity norms are supported");
        if (A.numRows() != n)
            throw new IllegalArgumentException("A.numRows() != n");
        if (!A.isSquare())
            throw new IllegalArgumentException("!A.isSquare()");
        // dgbcon needs the norm of the original matrix as input.
        double anorm = A.norm(norm);
        double[] work = new double[3 * n];
        int[] lwork = new int[n];
        intW info = new intW(0);
        doubleW rcond = new doubleW(0);
        LAPACK.getInstance().dgbcon(norm.netlib(), n, kl, ku, LU.getData(), Matrices.ld(2 * kl + ku + 1), ipiv, anorm, rcond, work, lwork, info);
        // A negative info value means an illegal argument was passed to LAPACK.
        if (info.val < 0)
            throw new IllegalArgumentException();
        return rcond.val;
    }
}