signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class GoogleAuthenticatorTokenCouchDbRepository {

    /**
     * Token count for a user.
     *
     * Queries the CouchDB view "count_by_userId", which emits one row per
     * document having both a token and a userId, keyed by userId.
     *
     * NOTE(review): the view declares reduce = "_count" but the code reads
     * the first row's value via getValueAsInt() — confirm the query actually
     * executes the reduce so that value is the count, not a document.
     *
     * @param userId user to count tokens for
     * @return count of the user's tokens; 0 when the view returns no rows
     */
    @View(name = "count_by_userId",
          map = "function(doc) { if(doc.token && doc.userId) { emit(doc.userId, doc) } }",
          reduce = "_count")
    public long countByUserId(final String userId) {
        val view = createQuery("count_by_userId").key(userId);
        val rows = db.queryView(view).getRows();
        if (rows.isEmpty()) {
            // No documents matched the key: the user has no tokens.
            return 0;
        }
        // Single reduced row carries the aggregate count.
        return rows.get(0).getValueAsInt();
    }
}
public class StringObservable {

    /**
     * Encodes a possibly infinite stream of strings into an Observable of
     * byte arrays, one encoded array per source string.
     *
     * This method allows for more control over how malformed and unmappable
     * characters are handled, via the supplied {@code CharsetEncoder}. A
     * {@code CharacterCodingException} from the encoder is rethrown wrapped
     * in a {@code RuntimeException}.
     *
     * NOTE(review): the single CharsetEncoder instance is reused for every
     * emitted item; CharsetEncoder is not thread-safe, so this assumes
     * sequential emission — confirm against the source Observable's contract.
     *
     * @param src the source Observable of strings
     * @param charsetEncoder the encoder used to turn each string into bytes
     * @return the Observable with a stream of encoded byte arrays
     */
    public static Observable<byte[]> encode(Observable<String> src, final CharsetEncoder charsetEncoder) {
        return src.map(new Func1<String, byte[]>() {
            @Override
            public byte[] call(String str) {
                CharBuffer cb = CharBuffer.wrap(str);
                ByteBuffer bb;
                try {
                    bb = charsetEncoder.encode(cb);
                } catch (CharacterCodingException e) {
                    // map() cannot throw checked exceptions; propagate unchecked.
                    throw new RuntimeException(e);
                }
                // encode() returns a flipped buffer; copy the readable region
                // [position, limit) into a standalone array.
                return Arrays.copyOfRange(bb.array(), bb.position(), bb.limit());
            }
        });
    }
}
public class MesosEntrypointUtils {

    /**
     * Loads and validates the Mesos scheduler configuration.
     *
     * Builds the Mesos FrameworkInfo (failover timeout, name, role, user) and,
     * when a framework principal is configured, a matching Credential.
     *
     * @param flinkConfig the global configuration.
     * @param hostname the hostname to advertise to the Mesos master.
     * @return the assembled MesosConfiguration
     * @throws IllegalConfigurationException if the mandatory master URL is not set
     */
    public static MesosConfiguration createMesosSchedulerConfiguration(Configuration flinkConfig, String hostname) {
        Protos.FrameworkInfo.Builder frameworkInfo = Protos.FrameworkInfo.newBuilder()
                .setHostname(hostname);
        Protos.Credential.Builder credential = null;

        // The master URL is the only strictly mandatory setting.
        if (!flinkConfig.contains(MesosOptions.MASTER_URL)) {
            throw new IllegalConfigurationException(MesosOptions.MASTER_URL.key() + " must be configured.");
        }
        String masterUrl = flinkConfig.getString(MesosOptions.MASTER_URL);

        Duration failoverTimeout = FiniteDuration.apply(
                flinkConfig.getInteger(MesosOptions.FAILOVER_TIMEOUT_SECONDS),
                TimeUnit.SECONDS);
        frameworkInfo.setFailoverTimeout(failoverTimeout.toSeconds());
        frameworkInfo.setName(flinkConfig.getString(MesosOptions.RESOURCEMANAGER_FRAMEWORK_NAME));
        frameworkInfo.setRole(flinkConfig.getString(MesosOptions.RESOURCEMANAGER_FRAMEWORK_ROLE));
        frameworkInfo.setUser(flinkConfig.getString(MesosOptions.RESOURCEMANAGER_FRAMEWORK_USER));

        // Credential is only built when a principal is configured.
        if (flinkConfig.contains(MesosOptions.RESOURCEMANAGER_FRAMEWORK_PRINCIPAL)) {
            frameworkInfo.setPrincipal(flinkConfig.getString(MesosOptions.RESOURCEMANAGER_FRAMEWORK_PRINCIPAL));
            credential = Protos.Credential.newBuilder();
            credential.setPrincipal(frameworkInfo.getPrincipal());
            // some environments use a side-channel to communicate the secret to Mesos,
            // and thus don't set the 'secret' configuration setting
            if (flinkConfig.contains(MesosOptions.RESOURCEMANAGER_FRAMEWORK_SECRET)) {
                credential.setSecret(flinkConfig.getString(MesosOptions.RESOURCEMANAGER_FRAMEWORK_SECRET));
            }
        }

        MesosConfiguration mesos =
                new MesosConfiguration(masterUrl, frameworkInfo, scala.Option.apply(credential));
        return mesos;
    }
}
public class PersistenceBrokerImpl {

    /**
     * Store/Link 1:n collection references.
     *
     * @param linkOnly if true the referenced objects will only be linked (FK set,
     *        no reference store). Reference store setting in descriptor will be
     *        ignored in this case
     * @param obj real object the reference starts
     * @param cod {@link CollectionDescriptor} of the real object
     * @param referencedObjects the referenced objects
     *        ({@link ManageableCollection} or Collection or Array) or null
     * @param insert flag for insert operation
     */
    private void storeAndLinkOneToMany(boolean linkOnly, Object obj, CollectionDescriptor cod,
                                       Object referencedObjects, boolean insert) {
        if (referencedObjects == null) {
            return;
        }
        /*
        Only make sense to perform (link or/and store) real referenced objects
        or materialized collection proxy objects, because on unmaterialized collection
        nothing has changed.
        - if the collection is a collectionproxy and it's not already loaded
          no need to perform an update on the referenced objects
        - on insert we link and insert the referenced objects, because the proxy
          collection maybe "inherited" from the object before the PK was replaced
        */
        if (insert || !(referencedObjects instanceof CollectionProxyDefaultImpl
                && !((CollectionProxyDefaultImpl) referencedObjects).isLoaded())) {
            Iterator it = BrokerHelper.getCollectionIterator(referencedObjects);
            Object refObj;
            while (it.hasNext()) {
                refObj = it.next();
                /* TODO: Check this!
                arminw:
                When it's necessary to 'link' (set the FK) the 1:n reference objects?
                1. set FK in refObj if it is materialized
                2. if the referenced object is a proxy AND the main object needs insert
                   we have to materialize the real object, because the user may move a collection
                   of proxy objects from object A to new object B. In this case we have to replace the
                   FK in the proxy object with new key of object B.
                */
                if (insert || getProxyFactory().isMaterialized(refObj)) {
                    ClassDescriptor refCld = getClassDescriptor(getProxyFactory().getRealClass(refObj));
                    // get the real object before linking
                    refObj = getProxyFactory().getRealObject(refObj);
                    link(refObj, refCld, cod, obj, insert);
                    // if enabled cascade store and not only link, store the refObj
                    if (!linkOnly && cod.getCascadingStore() == ObjectReferenceDescriptor.CASCADE_OBJECT) {
                        store(refObj);
                    }
                }
            }
        }
    }
}
public class Gauge {

    /**
     * Defines the graphical representation of the needle that will be used.
     * Values are ANGLED, ROUND and FLAT.
     * In principle it defines how the needle will be filled (gradient, color).
     *
     * NOTE(review): this follows what looks like a lazy-property pattern —
     * the backing field {@code _needleShape} is written while the property
     * object {@code needleShape} has not been created; confirm against the
     * corresponding property accessor elsewhere in the class.
     *
     * @param SHAPE the shape to apply; null falls back to NeedleShape.ANGLED
     *        on the field path
     */
    public void setNeedleShape(final NeedleShape SHAPE) {
        if (null == needleShape) {
            // Property not instantiated yet: write the plain field and redraw.
            _needleShape = null == SHAPE ? NeedleShape.ANGLED : SHAPE;
            fireUpdateEvent(REDRAW_EVENT);
        } else {
            // Property object exists: delegate the update to it.
            needleShape.set(SHAPE);
        }
    }
}
public class CmsDomUtil { /** * Utility method to determine the effective background color . < p >
* @ param element the element
* @ return the background color */
public static String getEffectiveBackgroundColor ( Element element ) { } } | String backgroundColor = CmsDomUtil . getCurrentStyle ( element , Style . backgroundColor ) ; if ( ( CmsStringUtil . isEmptyOrWhitespaceOnly ( backgroundColor ) || isTransparent ( backgroundColor ) || backgroundColor . equals ( StyleValue . inherit . toString ( ) ) ) ) { if ( ( Document . get ( ) . getBody ( ) != element ) && ( element . getParentElement ( ) != null ) ) { backgroundColor = getEffectiveBackgroundColor ( element . getParentElement ( ) ) ; } else { // if body element has still no background color set default to white
backgroundColor = "#FFFFFF" ; } } return backgroundColor ; |
public class DatabaseInfo {

    /**
     * Returns the date of the database.
     *
     * Scans the info string for the first whitespace character and parses the
     * eight characters that follow it as the database date.
     *
     * @return the date of the database, or null when no parseable date follows
     *         the first whitespace character.
     */
    public Date getDate() {
        // Stop 9 chars before the end so substring(i + 1, i + 9) stays in bounds.
        for (int i = 0; i < info.length() - 9; i++) {
            if (Character.isWhitespace(info.charAt(i))) {
                String dateString = info.substring(i + 1, i + 9);
                try {
                    // DateFormat is not thread-safe; guard the shared formatter.
                    synchronized (formatter) {
                        return formatter.parse(dateString);
                    }
                } catch (ParseException pe) {
                    // Deliberate best-effort: an unparseable candidate means "no date";
                    // the break below stops the scan after the first whitespace.
                }
                break;
            }
        }
        return null;
    }
}
public class RestartingS3InputStream {

    /**
     * Re-opens the input stream, starting at the first unread byte.
     *
     * Closes the current stream (ignoring close failures), then requests the
     * byte range [_pos, _length - 1] from S3, retrying retryable client
     * errors up to 3 times with linearly increasing back-off.
     *
     * @throws IOException declared for callers; S3 failures surface as
     *         AmazonClientException once retries are exhausted
     */
    private void reopenS3InputStream() throws IOException {
        // First attempt to close the existing input stream
        try {
            closeS3InputStream();
        } catch (IOException ignore) {
            // Ignore this exception; we're re-opening because there was in issue
            // with the existing stream in the first place.
        }
        InputStream remainingIn = null;
        int attempt = 0;
        while (remainingIn == null) {
            try {
                S3Object s3Object = _s3.getObject(
                        new GetObjectRequest(_bucket, _key)
                                .withRange(_pos, _length - 1)); // Range is inclusive, hence length - 1
                remainingIn = s3Object.getObjectContent();
            } catch (AmazonClientException e) {
                // Allow up to 3 retries
                attempt += 1;
                if (!e.isRetryable() || attempt == 4) {
                    throw e;
                }
                // Back-off on each retry
                try {
                    Thread.sleep(200 * attempt);
                } catch (InterruptedException interrupt) {
                    throw Throwables.propagate(interrupt);
                }
            }
        }
        _in = remainingIn;
    }
}
public class ChronoFormatter { /** * / * [ deutsch ]
* < p > Konstruiert einen Formatierer f & uuml ; r einfache Zeitstempelobjekte . < / p >
* @ param pattern format pattern
* @ param type the type of the pattern to be used
* @ param locale format locale
* @ return new { @ code ChronoFormatter } - instance
* @ throws IllegalArgumentException if resolving of pattern fails
* @ see # ofPattern ( String , PatternType , Locale , Chronology )
* @ since 3.1 */
public static ChronoFormatter < PlainTimestamp > ofTimestampPattern ( String pattern , PatternType type , Locale locale ) { } } | Builder < PlainTimestamp > builder = new Builder < > ( PlainTimestamp . axis ( ) , locale ) ; addPattern ( builder , pattern , type ) ; try { return builder . build ( ) ; } catch ( IllegalStateException ise ) { throw new IllegalArgumentException ( ise ) ; } |
public class Radial1Vertical {

    // <editor-fold defaultstate="collapsed" desc="Image related">

    /**
     * Renders the red threshold marker (a small triangle) into a fresh
     * translucent square image of the given width.
     *
     * @param WIDTH width (and height) of the square image
     * @return the rendered image, or null when WIDTH <= 0
     */
    @Override
    protected BufferedImage create_THRESHOLD_Image(final int WIDTH) {
        if (WIDTH <= 0) {
            return null;
        }
        final BufferedImage IMAGE = UTIL.createImage(WIDTH, WIDTH, Transparency.TRANSLUCENT);
        final Graphics2D G2 = IMAGE.createGraphics();
        // Quality-oriented rendering hints.
        G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        G2.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE);
        // G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
        G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
        G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
        // G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
        final int IMAGE_WIDTH = IMAGE.getWidth();
        final int IMAGE_HEIGHT = IMAGE.getHeight();
        // Triangle outline defined in fractions of the image size.
        final GeneralPath THRESHOLD = new GeneralPath();
        THRESHOLD.setWindingRule(Path2D.WIND_EVEN_ODD);
        THRESHOLD.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.3333333333333333);
        THRESHOLD.lineTo(IMAGE_WIDTH * 0.4866666666666667, IMAGE_HEIGHT * 0.37333333333333335);
        THRESHOLD.lineTo(IMAGE_WIDTH * 0.52, IMAGE_HEIGHT * 0.37333333333333335);
        THRESHOLD.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.3333333333333333);
        THRESHOLD.closePath();
        // Vertical red gradient: dark -> bright -> dark across the triangle's height.
        final Point2D THRESHOLD_START = new Point2D.Double(0, THRESHOLD.getBounds2D().getMinY());
        final Point2D THRESHOLD_STOP = new Point2D.Double(0, THRESHOLD.getBounds2D().getMaxY());
        final float[] THRESHOLD_FRACTIONS = { 0.0f, 0.3f, 0.59f, 1.0f };
        final Color[] THRESHOLD_COLORS = {
            new Color(82, 0, 0, 255),
            new Color(252, 29, 0, 255),
            new Color(252, 29, 0, 255),
            new Color(82, 0, 0, 255)
        };
        // Guard against coincident gradient points (LinearGradientPaint would throw).
        Util.INSTANCE.validateGradientPoints(THRESHOLD_START, THRESHOLD_STOP);
        final LinearGradientPaint THRESHOLD_GRADIENT =
                new LinearGradientPaint(THRESHOLD_START, THRESHOLD_STOP, THRESHOLD_FRACTIONS, THRESHOLD_COLORS);
        G2.setPaint(THRESHOLD_GRADIENT);
        G2.fill(THRESHOLD);
        // White 1px outline around the filled marker.
        G2.setColor(Color.WHITE);
        G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
        G2.draw(THRESHOLD);
        G2.dispose();
        return IMAGE;
    }

    // </editor-fold>
}
public class ScriptPluginFileCopier {

    /**
     * Copy existing file.
     *
     * Convenience overload that delegates to the six-argument copyFile,
     * passing null for the two intermediate arguments (presumably the
     * stream/content variants — confirm against the full overload).
     *
     * @param executionContext the execution context
     * @param file the local file to copy
     * @param node the target node
     * @param destination the destination path on the node
     * @return the value returned by the delegate copy operation
     * @throws FileCopierException if the copy fails
     */
    public String copyFile(final ExecutionContext executionContext, final File file,
            final INodeEntry node, final String destination) throws FileCopierException {
        return copyFile(executionContext, file, null, null, node, destination);
    }
}
public class UllmannState { /** * Check if there are any feasible mappings left for the query vertex n . We
* scan the compatibility matrix to see if any value is > 0.
* @ param n query vertex
* @ return a candidate is present */
private boolean hasCandidate ( int n ) { } } | for ( int j = ( n * matrix . mCols ) , end = ( j + matrix . mCols ) ; j < end ; j ++ ) if ( matrix . get ( j ) ) return true ; return false ; |
public class PrcMatchForeignLineCreate {

    /**
     * <p>Process entity request: delegates creation to the generic
     * entity-create service, then replaces the shallow owner reference with
     * the fully retrieved owner entity.</p>
     *
     * @param pAddParam additional param, e.g. return this line's
     *        document in "nextEntity" for farther process
     * @param pEntityPb Entity to process
     * @param pRequestData Request Data
     * @return Entity processed for farther process or null
     * @throws Exception - an exception
     */
    @Override
    public final MatchForeignLine process(final Map<String, Object> pAddParam,
            final MatchForeignLine pEntityPb, final IRequestData pRequestData) throws Exception {
        // Generic "create" handling first.
        MatchForeignLine entity = this.prcEntityCreate.process(pAddParam, pEntityPb, pRequestData);
        // Re-read the owner through the ORM so the line carries a full entity.
        entity.setItsOwner(this.prcEntityCreate.getSrvOrm()
                .retrieveEntity(pAddParam, entity.getItsOwner()));
        return entity;
    }
}
public class DateParser { /** * Parses the given year and month to a { @ link CSLDate } object . Does not
* handle ranges .
* @ param year the year to parse . Should be a four - digit number or a String
* whose last four characters are digits .
* @ param month the month to parse . May be a number ( < code > 1-12 < / code > ) ,
* a short month name ( < code > Jan < / code > to < code > Dec < / code > ) , or a
* long month name ( < code > January < / code > to < code > December < / code > ) . This
* method is also able to recognize month names in several locales .
* @ return the { @ link CSLDate } object or null if both , the year and the
* month , could not be parsed */
public static CSLDate toDateSingle ( String year , String month ) { } } | int m = toMonth ( month ) ; // parse year
int y = - 1 ; Boolean circa = null ; if ( year != null && year . length ( ) >= 4 ) { if ( StringUtils . isNumeric ( year ) ) { y = Integer . parseInt ( year ) ; } else { String fourDigit = year . substring ( year . length ( ) - 4 ) ; if ( StringUtils . isNumeric ( fourDigit ) ) { y = Integer . parseInt ( fourDigit ) ; if ( year . length ( ) > 4 ) { circa = Boolean . TRUE ; } } } } // create result
CSLDateBuilder builder = new CSLDateBuilder ( ) ; if ( y < 0 ) { return null ; } if ( m < 0 ) { return builder . dateParts ( y ) . circa ( circa ) . build ( ) ; } return builder . dateParts ( y , m ) . circa ( circa ) . build ( ) ; |
public class NameSimilarityEdgeFilter { /** * Removes any characters in the String that we don ' t care about in the matching procedure
* TODO Currently limited to certain ' western ' languages */
private String prepareName ( String name ) { } } | // \ s = A whitespace character : [ \ t \ n \ x0B \ f \ r ]
String [ ] arr = name . split ( "\\s" ) ; List < String > list = new ArrayList < > ( arr . length ) ; for ( int i = 0 ; i < arr . length ; i ++ ) { String rewrite = NON_WORD_CHAR . matcher ( toLowerCase ( arr [ i ] ) ) . replaceAll ( "" ) ; String tmp = rewriteMap . get ( rewrite ) ; if ( tmp != null ) rewrite = tmp ; // Ignore matching short frases like de , la , . . .
if ( ! rewrite . isEmpty ( ) && rewrite . length ( ) > 2 ) { list . add ( rewrite ) ; } } return listToString ( list ) ; |
public class HtmlBuilder {

    /**
     * Build a String containing a HTML opening tag with given CSS style
     * attribute(s) and concatenates the given content. Content should contain
     * no HTML, because it is prepared with {@link #htmlEncode(String)}.
     *
     * Delegates to the four-argument openTag variant, passing null for the
     * second argument (presumably the CSS class — see openTag).
     *
     * @param tag String name of HTML tag
     * @param style style for tag (plain CSS)
     * @param content content string
     * @return HTML tag element as string
     */
    public static String openTagStyle(String tag, String style, String... content) {
        return openTag(tag, null, style, content);
    }
}
public class JmsTransporter {

    /**
     * --- CONNECT ---
     *
     * (Re)establishes the JMS topic connection and session. Any failure is
     * routed to reconnect(cause) rather than thrown to the caller.
     */
    @Override
    public void connect() {
        try {
            // Create JMS client and session
            disconnect();
            if (factory == null) {
                // Lazily resolve the connection factory via JNDI.
                try {
                    Context ctx = new InitialContext();
                    factory = (TopicConnectionFactory) ctx.lookup(connectionFactoryJndiName);
                } catch (Exception cause) {
                    logger.error("TopicConnectionFactory is undefined and \"" + connectionFactoryJndiName
                            + "\" JNDI value isn't available!");
                    reconnect(cause);
                    return;
                }
            }
            // Anonymous connection when neither username nor password is set.
            if ((username == null || username.isEmpty()) && (password == null || password.isEmpty())) {
                client = factory.createTopicConnection();
            } else {
                client = factory.createTopicConnection(username, password);
            }
            client.setClientID(nodeID);
            client.start();
            session = client.createTopicSession(transacted, acknowledgeMode);
            connected();
        } catch (Exception cause) {
            reconnect(cause);
        }
    }
}
public class TreeNode { /** * Remove the { @ code child } from { @ code this } node ' s child array , giving it
* a { @ code null } parent .
* @ param child the child of this node to remove
* @ throws NullPointerException if the given { @ code child } is { @ code null }
* @ throws IllegalArgumentException if the given { @ code child } is not a
* child of this node */
public void remove ( final Tree < ? , ? > child ) { } } | requireNonNull ( child ) ; if ( ! isChild ( child ) ) { throw new IllegalArgumentException ( "The given child is not a child." ) ; } remove ( getIndex ( child ) ) ; |
public class Utils {

    /**
     * Null enabled equals().
     *
     * @param a the first object
     * @param b the second object
     * @return true if both are null or both are equal
     */
    public static <T> boolean equal(T a, T b) {
        if (a == null) {
            return b == null;
        }
        return a.equals(b);
    }
}
public class Client {

    /**
     * Starts processing.
     *
     * Calling this method starts the thread that regularly flushes all enabled
     * caches and enables the other methods on the instance to be called
     * successfully. I.e., even when the configuration disables aggregation, it
     * is invalid to access the other methods of an instance until ``start`` is
     * called - calls to other public methods will fail with an
     * {@code IllegalStateError}.
     */
    public synchronized void start() {
        if (running) {
            // Idempotent: a second start() is a logged no-op.
            log.atInfo().log("%s is already started", this);
            return;
        }
        log.atInfo().log("starting %s", this);
        this.stopped = false;
        this.running = true;
        this.reportStopwatch.reset().start();
        try {
            schedulerThread = threads.newThread(new Runnable() {
                @Override
                public void run() {
                    scheduleFlushes();
                }
            });
            schedulerThread.start();
        } catch (RuntimeException e) {
            // Could not spawn the background thread: fall back to inline flushing.
            log.atInfo().log(BACKGROUND_THREAD_ERROR);
            schedulerThread = null;
            initializeFlushing();
        }
    }
}
public class Utils {

    /**
     * Get contents from an URL.
     *
     * Delegates to the three-argument getUrl overload with the two optional
     * arguments left null.
     *
     * @param url The url
     * @return the contents fetched from the URL
     * @throws Exception if the URL is malformed or the fetch fails.
     *         NOTE(review): the original javadoc declared IOException but the
     *         signature declares the broader Exception — tighten if possible.
     */
    public static String getUrl(final String url) throws Exception {
        return getUrl(new URL(url), null, null);
    }
}
public class UserAttachedFormGenerator { /** * This method creates { @ link XmlObject } of type { @ link BudgetNarrativeAttachmentsDocument } by populating data from the given
* { @ link ProposalDevelopmentDocumentContract }
* @ param proposalDevelopmentDocument for which the { @ link XmlObject } needs to be created
* @ return { @ link XmlObject } which is generated using the given { @ link ProposalDevelopmentDocumentContract } */
@ Override public XmlObject getFormObject ( ProposalDevelopmentDocumentContract proposalDevelopmentDocument ) { } } | this . pdDoc = proposalDevelopmentDocument ; S2sUserAttachedFormFileContract userAttachedFormFile = findUserAttachedFormFile ( ) ; if ( userAttachedFormFile == null ) { throw new RuntimeException ( "Cannot find XML Data" ) ; } String formXml = userAttachedFormFile . getXmlFile ( ) ; XmlObject xmlObject ; try { xmlObject = XmlObject . Factory . parse ( formXml ) ; } catch ( XmlException e ) { throw new RuntimeException ( "XmlObject not ready" ) ; } S2sUserAttachedFormContract userAttachedForm = findUserAttachedForm ( ) ; List < ? extends S2sUserAttachedFormAttContract > attachments = userAttachedForm . getS2sUserAttachedFormAtts ( ) ; for ( S2sUserAttachedFormAttContract s2sUserAttachedFormAtt : attachments ) { addAttachment ( s2sUserAttachedFormAtt ) ; } return xmlObject ; |
public class CrystalTransform {

    /**
     * Tells whether this transformation is a pure translation: either a pure
     * crystal (lattice) translation or a fractional (within unit cell)
     * translation: space groups Ixxx, Cxxx, Fxxx have operators with
     * fractional translations within the unit cell.
     *
     * @return true if this is a pure crystal translation, or if the rotation
     *         part is the identity (within DELTA) and the translation part is
     *         non-zero
     */
    public boolean isPureTranslation() {
        if (isPureCrystalTranslation()) return true;
        // The 3x3 rotation part must be the identity (within DELTA) ...
        if (SpaceGroup.deltaComp(matTransform.m00, 1, SpaceGroup.DELTA)
                && SpaceGroup.deltaComp(matTransform.m01, 0, SpaceGroup.DELTA)
                && SpaceGroup.deltaComp(matTransform.m02, 0, SpaceGroup.DELTA)
                && SpaceGroup.deltaComp(matTransform.m10, 0, SpaceGroup.DELTA)
                && SpaceGroup.deltaComp(matTransform.m11, 1, SpaceGroup.DELTA)
                && SpaceGroup.deltaComp(matTransform.m12, 0, SpaceGroup.DELTA)
                && SpaceGroup.deltaComp(matTransform.m20, 0, SpaceGroup.DELTA)
                && SpaceGroup.deltaComp(matTransform.m21, 0, SpaceGroup.DELTA)
                && SpaceGroup.deltaComp(matTransform.m22, 1, SpaceGroup.DELTA)
                // ... and at least one translation component must be non-zero.
                && (Math.abs(matTransform.m03 - 0.0) > SpaceGroup.DELTA
                        || Math.abs(matTransform.m13 - 0.0) > SpaceGroup.DELTA
                        || Math.abs(matTransform.m23 - 0.0) > SpaceGroup.DELTA)) {
            return true;
        }
        return false;
    }
}
public class InsnList { /** * Returns the instruction whose index is given . This method builds a cache
* of the instructions in this list to avoid scanning the whole list each
* time it is called . Once the cache is built , this method run in constant
* time . This cache is invalidated by all the methods that modify the list .
* @ param index
* the index of the instruction that must be returned .
* @ return the instruction whose index is given .
* @ throws IndexOutOfBoundsException
* if ( index & lt ; 0 | | index & gt ; = size ( ) ) . */
public AbstractInsnNode get ( final int index ) { } } | if ( index < 0 || index >= size ) { throw new IndexOutOfBoundsException ( ) ; } if ( cache == null ) { cache = toArray ( ) ; } return cache [ index ] ; |
public class Strings {

    /**
     * Convenience method to return a String array as a delimited (e.g. CSV)
     * String. E.g. useful for <code>toString()</code> implementations.
     *
     * @param arr the array to display
     * @param delim the delimiter to use (probably a ",")
     * @return the delimited String; "" for a null or empty array
     */
    public static String arrayToDelimitedString(Object[] arr, String delim) {
        if (Objects.isEmpty(arr)) {
            return "";
        }
        // Single-element fast path avoids allocating a StringBuilder.
        if (arr.length == 1) {
            return Objects.nullSafeToString(arr[0]);
        }
        // NOTE(review): the loop appends elements directly while the fast path
        // uses nullSafeToString; StringBuilder.append(Object) renders null as
        // "null" — confirm nullSafeToString agrees so both paths match.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < arr.length; i++) {
            if (i > 0) {
                sb.append(delim);
            }
            sb.append(arr[i]);
        }
        return sb.toString();
    }
}
public class S3RecoverableWriter {

    /**
     * ----- Static Constructor -----
     *
     * Creates a writer after validating that the user-defined minimum part
     * size satisfies the S3 multipart lower bound, wiring a multipart-upload
     * factory from the supplied collaborators.
     *
     * @param fs the file system passed through to the upload factory
     * @param tempFileCreator factory producing ref-counted local temp files
     * @param s3AccessHelper helper used for the S3 calls
     * @param uploadThreadPool executor used by the upload factory
     * @param userDefinedMinPartSize minimum part size; must be >= S3_MULTIPART_MIN_PART_SIZE
     * @param maxConcurrentUploadsPerStream cap passed to the upload factory
     * @return the configured S3RecoverableWriter
     */
    public static S3RecoverableWriter writer(
            final FileSystem fs,
            final FunctionWithException<File, RefCountedFile, IOException> tempFileCreator,
            final S3AccessHelper s3AccessHelper,
            final Executor uploadThreadPool,
            final long userDefinedMinPartSize,
            final int maxConcurrentUploadsPerStream) {
        checkArgument(userDefinedMinPartSize >= S3_MULTIPART_MIN_PART_SIZE);
        final S3RecoverableMultipartUploadFactory uploadFactory =
                new S3RecoverableMultipartUploadFactory(
                        fs, s3AccessHelper, maxConcurrentUploadsPerStream, uploadThreadPool, tempFileCreator);
        return new S3RecoverableWriter(s3AccessHelper, uploadFactory, tempFileCreator, userDefinedMinPartSize);
    }
}
public class AptControlInterface { /** * Does the method have a Java Beans setter method signature ( is varient ) .
* @ param method AptMethod instance .
* @ return true if setter . */
private boolean isSetter ( AptMethod method ) { } } | String methodName = method . getName ( ) ; if ( methodName . length ( ) < 4 ) return false ; if ( ! methodName . startsWith ( "set" ) ) return false ; String argList = method . getArgList ( ) ; if ( argList . length ( ) == 0 ) return false ; if ( argList . indexOf ( ',' ) > - 1 ) return false ; if ( ! "void" . equals ( method . getReturnType ( ) ) ) return false ; return true ; |
public class FBOGraphics {

    /**
     * Initialise the GL context: fixed-function 2D texturing with alpha
     * blending, depth test and lighting disabled, full-screen viewport, and
     * an orthographic projection.
     */
    protected void initGL() {
        GL11.glEnable(GL11.GL_TEXTURE_2D);
        GL11.glShadeModel(GL11.GL_SMOOTH);
        // 2D rendering: depth and lighting are unnecessary.
        GL11.glDisable(GL11.GL_DEPTH_TEST);
        GL11.glDisable(GL11.GL_LIGHTING);
        GL11.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        GL11.glClearDepth(1);
        // Standard alpha blending (src-alpha / one-minus-src-alpha).
        GL11.glEnable(GL11.GL_BLEND);
        GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
        GL11.glViewport(0, 0, screenWidth, screenHeight);
        GL11.glMatrixMode(GL11.GL_MODELVIEW);
        GL11.glLoadIdentity();
        // Switch to an orthographic projection for pixel-space drawing.
        enterOrtho();
    }
}
public class SignUtils {

    /**
     * Generates a password digest using shaHex.
     *
     * SECURITY(review): DigestUtils.shaHex produces a bare, unsalted SHA-1
     * hex digest, which is not suitable for password storage; prefer a
     * dedicated password hash (bcrypt/scrypt/argon2). Flagged only — changing
     * the algorithm here would invalidate existing stored hashes.
     *
     * @param password the plain-text password
     * @return the hex-encoded digest
     */
    public static String createPassword(String password) {
        String data = DigestUtils.shaHex(password);
        return data;
    }
}
public class FeaturesImpl {

    /**
     * Gets all the phraselist features.
     *
     * Blocking wrapper: waits for the single async service response and
     * unwraps its body.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param listPhraseListsOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;PhraseListFeatureInfo&gt; object if successful.
     */
    public List<PhraseListFeatureInfo> listPhraseLists(UUID appId, String versionId,
            ListPhraseListsOptionalParameter listPhraseListsOptionalParameter) {
        return listPhraseListsWithServiceResponseAsync(appId, versionId, listPhraseListsOptionalParameter)
                .toBlocking().single().body();
    }
}
public class ResourcesInner {

    /**
     * Gets a resource by ID.
     *
     * Async variant: wraps the service-response observable in a ServiceFuture
     * that notifies the given callback.
     *
     * @param resourceId The fully qualified ID of the resource, including the resource name and resource type. Use the format, /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
     * @param apiVersion The API version to use for the operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<GenericResourceInner> getByIdAsync(String resourceId, String apiVersion,
            final ServiceCallback<GenericResourceInner> serviceCallback) {
        return ServiceFuture.fromResponse(getByIdWithServiceResponseAsync(resourceId, apiVersion), serviceCallback);
    }
}
public class CMLCoreModule {

    /**
     * Clean all data about read atoms: resets the atom counter and replaces
     * every per-atom accumulator with a fresh empty instance.
     */
    protected void newAtomData() {
        atomCounter = 0;
        // Element / identity data
        elsym = new ArrayList<String>();
        elid = new ArrayList<String>();
        eltitles = new ArrayList<String>();
        // Charges, isotopes and masses
        formalCharges = new ArrayList<String>();
        partialCharges = new ArrayList<String>();
        isotope = new ArrayList<String>();
        atomicNumbers = new ArrayList<String>();
        exactMasses = new ArrayList<String>();
        // 3D, 2D and fractional coordinates
        x3 = new ArrayList<String>();
        y3 = new ArrayList<String>();
        z3 = new ArrayList<String>();
        x2 = new ArrayList<String>();
        y2 = new ArrayList<String>();
        xfract = new ArrayList<String>();
        yfract = new ArrayList<String>();
        zfract = new ArrayList<String>();
        // Hydrogen counts and stereo parity references
        hCounts = new ArrayList<String>();
        atomParities = new ArrayList<String>();
        parityARef1 = new ArrayList<String>();
        parityARef2 = new ArrayList<String>();
        parityARef3 = new ArrayList<String>();
        parityARef4 = new ArrayList<String>();
        // Miscellaneous per-atom annotations
        atomAromaticities = new ArrayList<String>();
        atomDictRefs = new ArrayList<String>();
        spinMultiplicities = new ArrayList<String>();
        occupancies = new ArrayList<String>();
        atomCustomProperty = new HashMap<Integer, List<String>>();
    }
}
public class XGMMLUtility {

    /**
     * Returns a shape for the supplied {@link FunctionEnum}.
     *
     * Abundance-like functions map to ellipses, activities to hexagons,
     * processes/pathologies to rhombi, secretion/surface expression to arcs.
     *
     * @param fe {@link FunctionEnum}; null yields the default shape
     * @return Non-null {@link String}
     * @see #DFLT_NODE_SHAPE
     */
    public static String type(FunctionEnum fe) {
        if (fe == null) {
            return DFLT_NODE_SHAPE;
        }
        switch (fe) {
            case ABUNDANCE: return "ver_ellipsis";
            case BIOLOGICAL_PROCESS: return "rhombus";
            case CATALYTIC_ACTIVITY: return "hexagon";
            case CELL_SECRETION: return "arc";
            case CELL_SURFACE_EXPRESSION: return "arc";
            case CHAPERONE_ACTIVITY: return "hexagon";
            case COMPLEX_ABUNDANCE: return "hor_ellipsis";
            case COMPOSITE_ABUNDANCE: return "hor_ellipsis";
            case DEGRADATION: return "hor_ellipsis";
            case GENE_ABUNDANCE: return "hor_ellipsis";
            case GTP_BOUND_ACTIVITY: return "hexagon";
            case KINASE_ACTIVITY: return "hexagon";
            case MICRORNA_ABUNDANCE: return "hor_ellipsis";
            case MOLECULAR_ACTIVITY: return "hexagon";
            case PATHOLOGY: return "rhombus";
            case PEPTIDASE_ACTIVITY: return "hexagon";
            case PHOSPHATASE_ACTIVITY: return "hexagon";
            // NOTE(review): PRODUCTS falls through to the PROTEIN_ABUNDANCE
            // shape and REACTANTS to RIBOSYLATION_ACTIVITY — confirm the
            // grouping is deliberate.
            case PRODUCTS:
            case PROTEIN_ABUNDANCE: return "hor_ellipsis";
            case REACTANTS:
            case RIBOSYLATION_ACTIVITY: return "hexagon";
            case RNA_ABUNDANCE: return "hor_ellipsis";
            case TRANSCRIPTIONAL_ACTIVITY: return "hexagon";
            case TRANSPORT_ACTIVITY: return "hexagon";
        }
        // Unreached for known constants; kept as a safe fallback.
        return DFLT_NODE_SHAPE;
    }
}
public class GradleDependencyResolutionHelper {

    /**
     * Resolve the given artifact specifications via a detached Gradle configuration.
     *
     * @param project the Gradle project reference.
     * @param specs the specifications that need to be resolved.
     * @param transitive should the artifacts be resolved transitively?
     * @param excludeDefaults should we skip resolving artifacts that belong to the Thorntail group?
     * @return collection of resolved artifact specifications.
     * @throws IllegalArgumentException if {@code project} is null.
     */
    public static Set<ArtifactSpec> resolveArtifacts(Project project, Collection<ArtifactSpec> specs, boolean transitive, boolean excludeDefaults) {
        if (project == null) {
            throw new IllegalArgumentException("Gradle project reference cannot be null.");
        }
        if (specs == null) {
            project.getLogger().warn("Artifact specification collection is null.");
            return Collections.emptySet();
        }
        // Detached configuration used purely to drive Gradle's resolver.
        final Configuration config = project.getConfigurations().detachedConfiguration().setTransitive(transitive);
        final DependencySet dependencySet = config.getDependencies();
        final Map<String, Project> projectGAVCoordinates = getAllProjects(project);
        final ProjectAccessListener listener = new DefaultProjectAccessListener();
        Set<ArtifactSpec> result = new HashSet<>();
        specs.forEach(s -> {
            // 1. Do we need to resolve this entry?
            final String specGAV = String.format("%s:%s:%s", s.groupId(), s.artifactId(), s.version());
            boolean resolved = s.file != null;
            boolean projectEntry = projectGAVCoordinates.containsKey(specGAV);
            // 2. Should we skip this spec? Thorntail-group artifacts are skipped
            //    unless they are actually local projects in this build.
            if (excludeDefaults && FractionDescriptor.THORNTAIL_GROUP_ID.equals(s.groupId()) && !projectEntry) {
                return;
            }
            // 3. Should this entry be resolved?
            if (!resolved || transitive) {
                // a.) Does this entry represent a project dependency?
                if (projectGAVCoordinates.containsKey(specGAV)) {
                    dependencySet.add(new DefaultProjectDependency((ProjectInternal) projectGAVCoordinates.get(specGAV), listener, false));
                } else {
                    DefaultExternalModuleDependency d = new DefaultExternalModuleDependency(s.groupId(), s.artifactId(), s.version());
                    DefaultDependencyArtifact da = new DefaultDependencyArtifact(s.artifactId(), s.type(), s.type(), s.classifier(), null);
                    d.addArtifact(da);
                    dependencySet.add(d);
                }
            } else {
                // 4. Nothing else to do, just add the spec to the result.
                result.add(s);
            }
        });
        // 5. Are there any specs that need resolution?
        if (!dependencySet.isEmpty()) {
            config.getResolvedConfiguration().getResolvedArtifacts().stream()
                    .map(ra -> asDescriptor("compile", ra).toArtifactSpec())
                    .forEach(result::add);
        }
        return result;
    }
}
public class CPOptionCategoryLocalServiceWrapper {

    /**
     * Returns all the cp option categories matching the UUID and company.
     * Generated wrapper method: delegates to the wrapped
     * {@code CPOptionCategoryLocalService}.
     *
     * @param uuid the UUID of the cp option categories
     * @param companyId the primary key of the company
     * @return the matching cp option categories, or an empty list if no matches were found
     */
    @Override
    public java.util.List<com.liferay.commerce.product.model.CPOptionCategory> getCPOptionCategoriesByUuidAndCompanyId(String uuid, long companyId) {
        return _cpOptionCategoryLocalService.getCPOptionCategoriesByUuidAndCompanyId(uuid, companyId);
    }
}
public class clusternodegroup { /** * Use this API to unset the properties of clusternodegroup resources .
* Properties that need to be unset are specified in args array . */
public static base_responses unset ( nitro_service client , String name [ ] , String args [ ] ) throws Exception { } } | base_responses result = null ; if ( name != null && name . length > 0 ) { clusternodegroup unsetresources [ ] = new clusternodegroup [ name . length ] ; for ( int i = 0 ; i < name . length ; i ++ ) { unsetresources [ i ] = new clusternodegroup ( ) ; unsetresources [ i ] . name = name [ i ] ; } result = unset_bulk_request ( client , unsetresources , args ) ; } return result ; |
public class ArrayRankDouble {

    /**
     * Returns the index of the minimum value in the given array.
     *
     * <p>Bug fix: the previous implementation seeded the running minimum with
     * {@code Integer.MAX_VALUE}, so any array whose values were all larger
     * than that returned index 0 regardless of where the true minimum was.
     * Seeding with {@code Double.POSITIVE_INFINITY} handles the full double
     * range (including NaN-free arrays of arbitrarily large values).</p>
     *
     * @param array an array; should be non-empty for a meaningful result
     * @return index of the minimum value in {@code array}, or 0 if it is empty
     */
    public int getMinValueIndex(double[] array) {
        int index = 0;
        double min = Double.POSITIVE_INFINITY;
        for (int i = 0; i < array.length; i++) {
            if (array[i] < min) {
                min = array[i];
                index = i;
            }
        }
        return index;
    }
}
public class Timebase { /** * Parses an encoded timebase . < br
* < p > The following textual representations are valid for time bases : < / p >
* < ul >
* Its inverse as a rational number . The syntax is < i > { denominator } [ : { numerator } ] < / i > , where numerator can be omitted if its
* value is 1.
* < li > A TimeBaseConstant string < / li >
* < li > TimeBaseConstant < / li >
* < / ul >
* The following TimeBaseConstants are currently defined : < / p >
* < table >
* < tr >
* < th > TimeBaseConstant < / th >
* < th > Time base < / th >
* < / tr >
* < tr >
* < td > PAL < / td >
* < td > 25:1 < / td >
* < / tr >
* < tr >
* < td > NTSC or 29.97 < / td >
* < td > 30000:1001 < / td >
* < / tr >
* < tr >
* < td > 23.976 < / td >
* < td > 24000:1001 < / td >
* < / tr >
* < tr >
* < td > 59.94 < / td >
* < td > 60000:1001 < / td >
* < / tr >
* < tr >
* < td > NTSC30 < / td >
* < td > 30:1 < / td >
* < / tr >
* < / table >
* @ param rate
* @ return */
public static Timebase valueOf ( String rate ) { } } | if ( rate . equalsIgnoreCase ( "PAL" ) ) { return HZ_25 ; } else if ( rate . equals ( "23.976" ) ) { return NTSC_24 ; } else if ( rate . equalsIgnoreCase ( "NTSC" ) || rate . equals ( "29.97" ) ) { return NTSC ; } else if ( rate . equals ( "59.94" ) ) { return NTSC_60 ; } else if ( rate . equalsIgnoreCase ( "NTSC30" ) ) { return HZ_30 ; } else { final String [ ] parts = rate . split ( ":" ) ; final int denominator ; final int numerator ; if ( parts . length == 2 ) { numerator = Integer . parseInt ( parts [ 1 ] ) ; } else if ( parts . length == 1 ) { numerator = 1 ; // default to 1 when numerator is not provided
} else { throw new IllegalArgumentException ( "Cannot parse encoded timebase: " + rate ) ; } denominator = Integer . parseInt ( parts [ 0 ] ) ; return new Timebase ( numerator , denominator ) ; } |
public class JavaCCParserImpl { /** * look ahead n tokens . 0 is the current token , 1 is the next token , 2 the
* one after that , etc
* @ param n
* @ return */
private Token lookahead ( int n ) { } } | Token current = token ; for ( int i = 0 ; i < n ; i ++ ) { if ( current . next == null ) { current . next = token_source . getNextToken ( ) ; } current = current . next ; } return current ; |
public class MybatisUsageDao {

    /**
     * Convenience overload used by getUsageToday / getUsageYesterday /
     * getUsageThisMonth / getUsageLastMonth: delegates to the full
     * getUsageCalls variant with an empty extra argument.
     *
     * @param accountSid account to query usage for
     * @param category usage category filter
     * @param startDate start of the reporting window
     * @param endDate end of the reporting window
     * @param queryName name of the MyBatis query to execute
     * @return the matching usage records
     */
    private List<Usage> getUsageCalls(final Sid accountSid, Usage.Category category, DateTime startDate, DateTime endDate, final String queryName) {
        return getUsageCalls(accountSid, category, startDate, endDate, "", queryName);
    }
}
public class JShellToolProvider { /** * Run the jshell tool . The streams { @ code out } and { @ code err } are
* converted to { @ code PrintStream } if they are not already .
* Any { @ code Exception } is caught , printed and results in a non - zero return .
* @ param in command line input ( snippets and commands ) , and execution
* " standard " input ; use System . in if null
* @ param out command line output , feedback including errors , and execution
* " standard " output ; use System . out if null
* @ param err start - up errors and execution " standard " error ; use System . err
* if null
* @ param arguments arguments to pass to the tool
* @ return 0 for success ; nonzero otherwise
* @ throws NullPointerException if the array of arguments contains
* any { @ code null } elements . */
@ Override public int run ( InputStream in , OutputStream out , OutputStream err , String ... arguments ) { } } | InputStream xin = ( in == null ) ? System . in : in ; PrintStream xout = ( out == null ) ? System . out : ( out instanceof PrintStream ) ? ( PrintStream ) out : new PrintStream ( out ) ; PrintStream xerr = ( err == null ) ? System . err : ( err instanceof PrintStream ) ? ( PrintStream ) err : new PrintStream ( err ) ; try { JavaShellToolBuilder . builder ( ) . in ( xin , null ) . out ( xout ) . err ( xerr ) . run ( arguments ) ; return 0 ; } catch ( Throwable ex ) { xerr . println ( ex . getMessage ( ) ) ; return 1 ; } |
public class LocalWorkspaceDataManagerStub {

    /**
     * {@inheritDoc}
     *
     * Wraps the superclass result in an unmodifiable view so callers cannot
     * mutate the underlying child-property list.
     */
    @Override
    public List<PropertyData> listChildPropertiesData(final NodeData parent) throws RepositoryException {
        return Collections.unmodifiableList(super.listChildPropertiesData(parent));
    }
}
public class XLogPDescriptor { /** * Gets the doubleBondedCarbonsCount attribute of the XLogPDescriptor object .
* @ param ac Description of the Parameter
* @ param atom Description of the Parameter
* @ return The doubleBondedCarbonsCount value */
private int getDoubleBondedCarbonsCount ( IAtomContainer ac , IAtom atom ) { } } | List < IAtom > neighbours = ac . getConnectedAtomsList ( atom ) ; IBond bond ; int cdbcounter = 0 ; for ( IAtom neighbour : neighbours ) { if ( neighbour . getSymbol ( ) . equals ( "C" ) ) { bond = ac . getBond ( neighbour , atom ) ; if ( bond . getOrder ( ) == IBond . Order . DOUBLE ) { cdbcounter += 1 ; } } } return cdbcounter ; |
public class AmazonSNSClient {

    /**
     * Deletes a platform application object for one of the supported push
     * notification services, such as APNS and GCM. For more information, see
     * <a href="https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html">Using
     * Amazon SNS Mobile Push Notifications</a>.
     *
     * @param request Input for DeletePlatformApplication action.
     * @return Result of the DeletePlatformApplication operation returned by the service.
     * @throws InvalidParameterException if a request parameter violates its constraints.
     * @throws InternalErrorException on an internal service error.
     * @throws AuthorizationErrorException when access to the resource is denied.
     */
    @Override
    public DeletePlatformApplicationResult deletePlatformApplication(DeletePlatformApplicationRequest request) {
        // Apply client-side request handlers/validation before dispatching.
        request = beforeClientExecution(request);
        return executeDeletePlatformApplication(request);
    }
}
public class IfcFaceBasedSurfaceModelImpl {

    /**
     * <!-- begin-user-doc -->
     * Returns the face sets of this surface model via the generated EMF
     * reflective accessor (second argument {@code true} resolves proxies).
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @SuppressWarnings("unchecked")
    public EList<IfcConnectedFaceSet> getFbsmFaces() {
        return (EList<IfcConnectedFaceSet>) eGet(Ifc2x3tc1Package.Literals.IFC_FACE_BASED_SURFACE_MODEL__FBSM_FACES, true);
    }
}
class SumAsciiUppercaseCharacters {

    /**
     * Sums the ASCII values of the uppercase characters in the input string.
     *
     * <p>Examples:
     * sumAsciiUppercaseCharacters("") == 0,
     * sumAsciiUppercaseCharacters("abAB") == 131,
     * sumAsciiUppercaseCharacters("abcCd") == 67,
     * sumAsciiUppercaseCharacters("helloE") == 69,
     * sumAsciiUppercaseCharacters("woArBld") == 131,
     * sumAsciiUppercaseCharacters("aAaaaXa") == 153.</p>
     *
     * @param inputString the string to scan
     * @return the sum of the character values of the uppercase characters
     */
    public static int sumAsciiUppercaseCharacters(String inputString) {
        return inputString.chars()
                .filter(Character::isUpperCase)
                .sum();
    }
}
public class CouchbaseManifestComparator {

    /**
     * Saves the provided data under a freshly incremented versioned key, then
     * repoints the manifest key at the new version and deletes the previous one.
     *
     * NB: to avoid race conditions, ensure only one service (an elected leader)
     * can write at a time — see the distributed-lock implementations referenced
     * in the class documentation.
     *
     * @param data the payload to save
     */
    @Override
    public void saveAndIncrement(T data) {
        Either<Void, T> oldData = this.data;
        VersionedKey newVersionedKey = increment();
        logger.info("Saving data with key {}, new version is {}", key, newVersionedKey.toJson());
        // Write the new versioned value first, then repoint the manifest key at it.
        connection.put(newVersionedKey.toJson(), new Data(data, new Date(), newVersionedKey.toJson()));
        connection.put(key, newVersionedKey.toJson());
        try {
            this.data = Either.right(data);
            // Remove the previous version now that the manifest points elsewhere.
            delete(versionedKey);
        } catch (Throwable t) {
            // NOTE(review): cleanup failures roll back the cached value but are
            // otherwise swallowed — presumably best-effort by design; confirm.
            this.data = oldData;
        } finally {
            versionedKey = newVersionedKey.toJson();
        }
    }
}
public class LogInterceptor { /** * 取得list的string , 自定义的主要目的 : 针对value中数组数据的toString处理 , copy from { @ link AbstractCollection }
* @ param result
* @ return */
private String getListResultString ( List result ) { } } | StringBuilder sb = new StringBuilder ( ) ; Iterator i = result . iterator ( ) ; if ( ! i . hasNext ( ) ) { return "[]" ; } sb . append ( '[' ) ; for ( ; ; ) { Object e = i . next ( ) ; // 注意 : 修改为getResultString ( e ) 进行递归处理
sb . append ( e == this ? "(this Collection)" : getResultString ( e ) ) ; if ( ! i . hasNext ( ) ) { return sb . append ( ']' ) . toString ( ) ; } sb . append ( ", " ) ; } |
public class RandomBiomorphMutation {

    /**
     * Mutates a single biomorph.
     *
     * @param biomorph The biomorph to mutate.
     * @param rng The source of randomness to use for mutation.
     * @return A mutated version of the biomorph.
     */
    private Biomorph mutateBiomorph(Biomorph biomorph, Random rng) {
        int[] genes = biomorph.getGenotype();
        assert genes.length == Biomorph.GENE_COUNT : "Biomorphs must have " + Biomorph.GENE_COUNT + " genes.";
        // Mutate every gene except the last (length) gene: each selected gene
        // is nudged up or down by one and wraps around at the value bounds.
        for (int i = 0; i < Biomorph.GENE_COUNT - 1; i++) {
            if (mutationProbability.nextEvent(rng)) {
                boolean increase = rng.nextBoolean();
                genes[i] += (increase ? 1 : -1);
                if (genes[i] > Biomorph.GENE_MAX) {
                    genes[i] = Biomorph.GENE_MIN;
                } else if (genes[i] < Biomorph.GENE_MIN) {
                    genes[i] = Biomorph.GENE_MAX;
                }
            }
        }
        // The length gene is always mutated (no probability check) and wraps
        // within its own, separate bounds.
        boolean increase = rng.nextBoolean();
        genes[Biomorph.LENGTH_GENE_INDEX] += (increase ? 1 : -1);
        if (genes[Biomorph.LENGTH_GENE_INDEX] > Biomorph.LENGTH_GENE_MAX) {
            genes[Biomorph.LENGTH_GENE_INDEX] = Biomorph.LENGTH_GENE_MIN;
        } else if (genes[Biomorph.LENGTH_GENE_INDEX] < Biomorph.LENGTH_GENE_MIN) {
            genes[Biomorph.LENGTH_GENE_INDEX] = Biomorph.LENGTH_GENE_MAX;
        }
        return new Biomorph(genes);
    }
}
public class UnixCrypt {

    /**
     * Permutes the first {@code n} entries of {@code data} in place according
     * to the 1-based permutation table {@code t}: data[i] = original[t[i] - 1].
     * Assumes {@code data} holds at least 64 entries (one DES bit block).
     *
     * @param data the 64-entry bit array to permute in place
     * @param t 1-based permutation table with at least {@code n} entries
     * @param n number of output positions to fill
     */
    private static void transpose(byte[] data, byte[] t, int n) {
        // Snapshot the input so in-place writes cannot corrupt later reads.
        byte[] snapshot = new byte[64];
        System.arraycopy(data, 0, snapshot, 0, snapshot.length);
        for (int i = n - 1; i >= 0; i--) {
            data[i] = snapshot[t[i] - 1];
        }
    }
}
public class WSJdbcResultSet {

    /**
     * See JDBC 4.0 JavaDoc API for details.
     * Retrieves the SQLXML value of the given column, tracking it for cleanup
     * on close and translating driver failures into adapter exceptions.
     */
    public SQLXML getSQLXML(int columnIndex) throws SQLException {
        try {
            SQLXML xml = rsetImpl.getSQLXML(columnIndex);
            // Track the SQLXML so it can be freed when this result set closes.
            if (xml != null && freeResourcesOnClose)
                xmls.add(xml);
            return xml;
        } catch (SQLException sqlX) {
            FFDCFilter.processException(sqlX, getClass().getName() + ".getSQLXML", "2115", this);
            throw WSJdbcUtil.mapException(this, sqlX);
        } catch (NullPointerException nullX) {
            // No FFDC code needed; we might be closed.
            throw runtimeXIfNotClosed(nullX);
        } catch (AbstractMethodError methError) {
            // No FFDC code needed; wrong JDBC level.
            throw AdapterUtil.notSupportedX("ResultSet.getSQLXML", methError);
        } catch (RuntimeException runX) {
            FFDCFilter.processException(runX, getClass().getName() + ".getSQLXML", "2131", this);
            if (tc.isDebugEnabled())
                Tr.debug(this, tc, "getSQLXML", runX);
            throw runX;
        } catch (Error err) {
            FFDCFilter.processException(err, getClass().getName() + ".getSQLXML", "2138", this);
            if (tc.isDebugEnabled())
                Tr.debug(this, tc, "getSQLXML", err);
            throw err;
        }
    }
}
public class AptType { /** * Helper method to return type parameter information */
private String getFormalTypeParameters ( boolean namesOnly ) { } } | Collection < TypeParameterDeclaration > ftColl = _typeDecl . getFormalTypeParameters ( ) ; if ( ftColl . size ( ) == 0 ) return "" ; StringBuffer sb = new StringBuffer ( "<" ) ; boolean isFirst = true ; for ( TypeParameterDeclaration tpDecl : ftColl ) { if ( ! isFirst ) sb . append ( "," ) ; else isFirst = false ; if ( namesOnly ) sb . append ( tpDecl . getSimpleName ( ) ) ; else sb . append ( tpDecl . toString ( ) ) ; } sb . append ( ">" ) ; return sb . toString ( ) ; |
public class HTODInvalidationBuffer {

    /**
     * Removes an id from one of the invalidation buffers. The returnToPool
     * flag is used by the GC buffer only: when set, removed eviction table
     * entries are recycled into the eviction entry pool.
     */
    protected synchronized void remove(Object id, int bufferType, boolean returnToPool) {
        // final String methodName = "remove(Object, bufferType)";
        if (id == null) {
            return;
        }
        if (bufferType == this.EXPLICIT_BUFFER) {
            this.explicitBuffer.remove(id);
        } else if (bufferType == this.SCAN_BUFFER) {
            this.scanBuffer.remove(id);
        } else if (bufferType == this.GC_BUFFER) {
            if (id instanceof EvictionTableEntry) {
                EvictionTableEntry evt1 = (EvictionTableEntry) id;
                // Linear scan comparing by identity (==), not equals().
                int i;
                for (i = 0; i < garbageCollectorBuffer.size(); i++) {
                    EvictionTableEntry evt = (EvictionTableEntry) garbageCollectorBuffer.get(i);
                    if (evt == evt1)
                        break;
                }
                if (i < garbageCollectorBuffer.size())
                    this.garbageCollectorBuffer.remove(i);
                if (returnToPool) {
                    cod.htod.evictionEntryPool.add(id);
                }
            }
        }
        // traceDebug(methodName, "cacheName=" + this.cod.cacheName + " id=" + id + " bufferType=" + bufferType);
    }
}
public class Charset { /** * Returns an immutable case - insensitive map from canonical names to { @ code Charset } instances .
* If multiple charsets have the same canonical name , it is unspecified which is returned in
* the map . This method may be slow . If you know which charset you ' re looking for , use
* { @ link # forName } .
* @ return an immutable case - insensitive map from canonical names to { @ code Charset } instances */
public static SortedMap < String , Charset > availableCharsets ( ) { } } | // Start with a copy of the built - in charsets . . .
@ SuppressWarnings ( "unchecked" ) TreeMap < String , Charset > charsets = new TreeMap < String , Charset > ( String . CASE_INSENSITIVE_ORDER ) ; for ( String charsetName : IOSCharset . getAvailableCharsetNames ( ) ) { Charset charset = IOSCharset . charsetForName ( charsetName ) ; charsets . put ( charset . name ( ) , charset ) ; } return Collections . unmodifiableSortedMap ( charsets ) ; |
public class JMFileAppender {

    /**
     * Appends the string to the file at the given path and closes the appender.
     *
     * @param filePath the file path
     * @param charset the charset
     * @param writingString the string to write
     * @return the path of the written file
     */
    public static Path appendAndClose(String filePath, Charset charset, String writingString) {
        return new JMFileAppender(filePath, charset).append(writingString).closeAndGetFilePath();
    }
}
public class ComponentTag { /** * each attribute set by a tag should use this method for attribute declaration ;
* an existing value with the same key is registered and restored if the tag rendering ends */
protected void setAttribute ( String key , Object value , int scope ) { } } | Map < String , Object > replacedInScope = getReplacedAttributes ( scope ) ; if ( ! replacedInScope . containsKey ( key ) ) { Object current = pageContext . getAttribute ( key , scope ) ; replacedInScope . put ( key , current ) ; } pageContext . setAttribute ( key , value , scope ) ; |
public class YarnSubmissionHelper {

    /**
     * Add a file to be localized on the driver.
     *
     * @param resourceName the name under which the resource is localized
     * @param resource the YARN local resource descriptor
     * @return this helper, for call chaining
     */
    public YarnSubmissionHelper addLocalResource(final String resourceName, final LocalResource resource) {
        resources.put(resourceName, resource);
        return this;
    }
}
public class EscapeXML { /** * Emit the supplied String to the specified writer , escaping characters if needed .
* @ param src the String to write
* @ param escapeXml if true , escape unsafe characters before writing
* @ param out the JspWriter to emit to
* @ throws IOException if there was a problem emitting the content */
public static void emit ( String src , boolean escapeXml , JspWriter out ) throws IOException { } } | if ( escapeXml ) { emit ( src , out ) ; } else { out . write ( src ) ; } |
public class Recast {

    /**
     * Builds a compact heightfield from the given (open) heightfield: copies
     * the header, packs the walkable spans into a flat span array, and
     * computes the 4-neighbour connectivity for each span.
     *
     * @see rcAllocCompactHeightfield, rcHeightfield, rcCompactHeightfield, rcConfig
     */
    public static CompactHeightfield buildCompactHeightfield(Context ctx, int walkableHeight, int walkableClimb, Heightfield hf) {
        ctx.startTimer("BUILD_COMPACTHEIGHTFIELD");
        CompactHeightfield chf = new CompactHeightfield();
        int w = hf.width;
        int h = hf.height;
        int spanCount = getHeightFieldSpanCount(ctx, hf);
        // Fill in header.
        chf.width = w;
        chf.height = h;
        chf.spanCount = spanCount;
        chf.walkableHeight = walkableHeight;
        chf.walkableClimb = walkableClimb;
        chf.maxRegions = 0;
        copy(chf.bmin, hf.bmin);
        copy(chf.bmax, hf.bmax);
        // Raise the bounding box to make room for agents standing on top spans.
        chf.bmax[1] += walkableHeight * hf.ch;
        chf.cs = hf.cs;
        chf.ch = hf.ch;
        chf.cells = new CompactCell[w * h];
        chf.spans = new CompactSpan[spanCount];
        chf.areas = new int[spanCount];
        int MAX_HEIGHT = 0xffff;
        for (int i = 0; i < chf.cells.length; i++) {
            chf.cells[i] = new CompactCell();
        }
        for (int i = 0; i < chf.spans.length; i++) {
            chf.spans[i] = new CompactSpan();
        }
        // Fill in cells and spans.
        int idx = 0;
        for (int y = 0; y < h; ++y) {
            for (int x = 0; x < w; ++x) {
                Span s = hf.spans[x + y * w];
                // If there are no spans at this cell, just leave the data to index=0, count=0.
                if (s == null)
                    continue;
                CompactCell c = chf.cells[x + y * w];
                c.index = idx;
                c.count = 0;
                while (s != null) {
                    if (s.area != RC_NULL_AREA) {
                        // The compact span covers the open space above this span.
                        int bot = s.smax;
                        int top = s.next != null ? (int) s.next.smin : MAX_HEIGHT;
                        chf.spans[idx].y = RecastCommon.clamp(bot, 0, 0xffff);
                        chf.spans[idx].h = RecastCommon.clamp(top - bot, 0, 0xff);
                        chf.areas[idx] = s.area;
                        idx++;
                        c.count++;
                    }
                    s = s.next;
                }
            }
        }
        // Find neighbour connections.
        int MAX_LAYERS = RC_NOT_CONNECTED - 1;
        int tooHighNeighbour = 0;
        for (int y = 0; y < h; ++y) {
            for (int x = 0; x < w; ++x) {
                CompactCell c = chf.cells[x + y * w];
                for (int i = c.index, ni = c.index + c.count; i < ni; ++i) {
                    CompactSpan s = chf.spans[i];
                    for (int dir = 0; dir < 4; ++dir) {
                        RecastCommon.SetCon(s, dir, RC_NOT_CONNECTED);
                        int nx = x + RecastCommon.GetDirOffsetX(dir);
                        int ny = y + RecastCommon.GetDirOffsetY(dir);
                        // First check that the neighbour cell is in bounds.
                        if (nx < 0 || ny < 0 || nx >= w || ny >= h)
                            continue;
                        // Iterate over all neighbour spans and check if any of them is
                        // accessible from the current cell.
                        CompactCell nc = chf.cells[nx + ny * w];
                        for (int k = nc.index, nk = nc.index + nc.count; k < nk; ++k) {
                            CompactSpan ns = chf.spans[k];
                            int bot = Math.max(s.y, ns.y);
                            int top = Math.min(s.y + s.h, ns.y + ns.h);
                            // Check that the gap between the spans is walkable,
                            // and that the climb height between the gaps is not too high.
                            if ((top - bot) >= walkableHeight && Math.abs(ns.y - s.y) <= walkableClimb) {
                                // Mark direction as walkable.
                                int lidx = k - nc.index;
                                // Layer index must fit in the connection encoding.
                                if (lidx < 0 || lidx > MAX_LAYERS) {
                                    tooHighNeighbour = Math.max(tooHighNeighbour, lidx);
                                    continue;
                                }
                                RecastCommon.SetCon(s, dir, lidx);
                                break;
                            }
                        }
                    }
                }
            }
        }
        if (tooHighNeighbour > MAX_LAYERS) {
            throw new RuntimeException("rcBuildCompactHeightfield: Heightfield has too many layers " + tooHighNeighbour
                    + " (max: " + MAX_LAYERS + ")");
        }
        ctx.stopTimer("BUILD_COMPACTHEIGHTFIELD");
        return chf;
    }
}
public class Node {

    /**
     * Merge the two entries at the given positions. The entries are reordered
     * in the <code>entries</code> array so that the non-empty entries are
     * still at the beginning.
     *
     * @param pos1 position of the first entry to merge; must be less than pos2
     * @param pos2 position of the second entry to merge; must be greater than pos1
     */
    protected void mergeEntries(int pos1, int pos2) {
        assert (this.numFreeEntries() == 0);
        assert (pos1 < pos2);
        this.entries[pos1].mergeWith(this.entries[pos2]);
        // Shift the remaining entries left to close the gap at pos2.
        for (int i = pos2; i < entries.length - 1; i++) {
            entries[i] = entries[i + 1];
        }
        // NOTE(review): after the shift, the last two slots reference the same
        // entry object, so clear() here also clears the entry now visible at
        // the second-to-last slot — presumably intentional (that slot is
        // considered free after the merge), but worth confirming.
        entries[entries.length - 1].clear();
    }
}
public class PostResponseUnmarshaller {

    /**
     * {@inheritDoc}
     *
     * Copies the "_id" value from the JSON response into the single
     * {@code @Id}-annotated field of the target entity.
     */
    @Override
    protected void onString(String text, String fieldName, JsonParser jp) {
        log.trace(fieldName + " " + text);
        if ("_id".equals(fieldName)) {
            List<java.lang.reflect.Field> fields = ClassUtil.getAnnotatedFields(entity.getClass(), Id.class);
            // Exactly one @Id field is required; anything else is a mapping error.
            if (fields.isEmpty() || fields.size() > 1) {
                throw new ApitraryOrmIdException("Illegal amount of annotated id properties of class " + entity.getClass().getName());
            } else {
                ClassUtil.setSilent(this.entity, fields.get(0).getName(), text);
            }
        }
    }
}
public class Configuration { /** * Set the given property to < code > Pattern < / code > .
* If the pattern is passed as null , sets the empty pattern which results in
* further calls to getPattern ( . . . ) returning the default value .
* @ param name property name
* @ param pattern new value */
public void setPattern ( String name , Pattern pattern ) { } } | if ( null == pattern ) { set ( name , null ) ; } else { set ( name , pattern . pattern ( ) ) ; } |
public class VirtualNetworkGatewaysInner {

    /**
     * Resets the primary of the virtual network gateway in the specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkGatewayName The name of the virtual network gateway.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<VirtualNetworkGatewayInner> beginResetAsync(String resourceGroupName, String virtualNetworkGatewayName, final ServiceCallback<VirtualNetworkGatewayInner> serviceCallback) {
        return ServiceFuture.fromResponse(beginResetWithServiceResponseAsync(resourceGroupName, virtualNetworkGatewayName), serviceCallback);
    }
}
public class BlockNameSpace { /** * We have the variable : either it was declared here with a type , giving
* it block local scope or an untyped var was explicitly set here via
* setBlockVariable ( ) . */
private boolean weHaveVar ( String name ) { } } | // super . variables . containsKey ( name ) not any faster , I checked
try { return super . getVariableImpl ( name , false ) != null ; } catch ( UtilEvalError e ) { return false ; } |
public class ConfiguredModule {

    /**
     * Put a value in the module configuration map. Bound by Jackson to any
     * unrecognized JSON property via {@code @JsonAnySetter}.
     *
     * @param key the key.
     * @param value the value.
     */
    @JsonAnySetter
    public void putConfigurationValue(String key, Object value) {
        this.configuration.put(key, value);
    }
}
public class SearchHelper { /** * Set the filter to a list of attributes .
* @ param attributes a collection of attribute names */
public void setAttributes ( final Collection < String > attributes ) { } } | this . attributes = attributes == null ? null : Collections . unmodifiableSet ( new HashSet < String > ( attributes ) ) ; |
public class StatFsHelper {

    /**
     * Gets the available storage space for either internal or external
     * storage, depending on the given storage type.
     *
     * @param storageType internal or external storage
     * @return available space in bytes, 0 if no information is available
     */
    @SuppressLint("DeprecatedMethod")
    public long getAvailableStorageSpace(StorageType storageType) {
        ensureInitialized();
        maybeUpdateStats();
        StatFs statFS = storageType == StorageType.INTERNAL ? mInternalStatFs : mExternalStatFs;
        if (statFS != null) {
            long blockSize, availableBlocks;
            // The *Long variants exist only from JELLY_BEAN_MR2 (API 18);
            // fall back to the deprecated int-returning methods before that.
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
                blockSize = statFS.getBlockSizeLong();
                availableBlocks = statFS.getAvailableBlocksLong();
            } else {
                blockSize = statFS.getBlockSize();
                availableBlocks = statFS.getAvailableBlocks();
            }
            return blockSize * availableBlocks;
        }
        return 0;
    }
}
public class ScriptableObject { /** * Another version of Get that supports Symbol keyed properties . */
@ Override public Object get ( Symbol key , Scriptable start ) { } } | Slot slot = slotMap . query ( key , 0 ) ; if ( slot == null ) { return Scriptable . NOT_FOUND ; } return slot . getValue ( start ) ; |
public class ImmutableMultisetJsonDeserializer {

    /**
     * Static factory.
     *
     * @param deserializer {@link JsonDeserializer} used to deserialize the objects inside the {@link ImmutableMultiset}.
     * @param <T> type of the elements inside the {@link ImmutableMultiset}
     * @return a new instance of {@link ImmutableMultisetJsonDeserializer}
     */
    public static <T> ImmutableMultisetJsonDeserializer<T> newInstance(JsonDeserializer<T> deserializer) {
        return new ImmutableMultisetJsonDeserializer<T>(deserializer);
    }
}
public class AssemblyFiles { /** * Get the list of all updated entries i . e . all entries which have modification date
* which is newer than the last time check . ATTENTION : As a side effect this method also
* updates the timestamp of entries .
* @ return list of all entries which has been updated since the last call to this method or an empty list */
public List < Entry > getUpdatedEntriesAndRefresh ( ) { } } | List < Entry > ret = new ArrayList < > ( ) ; for ( Entry entry : entries ) { if ( entry . isUpdated ( ) ) { ret . add ( entry ) ; } } return ret ; |
public class ImageDownloader {

    /**
     * Submits a new hadoop image download task.
     *
     * @param URL the url of the image
     * @param id the id of the image (used to name the image file after download)
     */
    public void submitHadoopDownloadTask(String URL, String id) {
        Callable<ImageDownloadResult> call = new HadoopImageDownload(URL, id, followRedirects);
        pool.submit(call);
        // Track outstanding work so completion can be awaited elsewhere.
        numPendingTasks++;
    }
}
public class GetRelationalDatabaseLogStreamsResult { /** * An object describing the result of your get relational database log streams request .
* @ param logStreams
* An object describing the result of your get relational database log streams request . */
public void setLogStreams ( java . util . Collection < String > logStreams ) { } } | if ( logStreams == null ) { this . logStreams = null ; return ; } this . logStreams = new java . util . ArrayList < String > ( logStreams ) ; |
public class PeerEurekaNodes { /** * Checks if the given service url contains the current host which is trying
* to replicate . Only after the EIP binding is done the host has a chance to
* identify itself in the list of replica nodes and needs to take itself out
* of replication traffic .
* @ param url the service url of the replica node that the check is made .
* @ return true , if the url represents the current node which is trying to
* replicate , false otherwise . */
public boolean isThisMyUrl ( String url ) { } } | final String myUrlConfigured = serverConfig . getMyUrl ( ) ; if ( myUrlConfigured != null ) { return myUrlConfigured . equals ( url ) ; } return isInstanceURL ( url , applicationInfoManager . getInfo ( ) ) ; |
public class MSNumpress {
    /**
     * Computes the fixed point for "slof" encoding: the largest multiplier such that
     * log(1 + max(data)) scaled by it still fits in an unsigned 16-bit value.
     *
     * @param data values to encode
     * @param dataSize number of leading entries of {@code data} to consider
     * @return the optimal fixed point, or 0 for an empty input
     */
    public static double optimalSlofFixedPoint(double[] data, int dataSize) {
        if (dataSize == 0) {
            return 0;
        }
        // Never let the divisor drop below 1 so the fixed point is bounded by 0xFFFF.
        double maxLog = 1;
        for (int i = 0; i < dataSize; i++) {
            maxLog = Math.max(maxLog, Math.log(data[i] + 1));
        }
        return Math.floor(0xFFFF / maxLog);
    }
}
public class LogInFormView { /** * Triggers the back action on the form .
* @ return true if it was handled , false otherwise */
public boolean onBackPressed ( ) { } } | if ( corporateSSO ) { Log . d ( TAG , "Removing the SSO Login Form, going back to the Username/Password Form." ) ; resetDomain ( ) ; showSSOMessage ( true ) ; return true ; } return false ; |
public class TaskDeleteOptions { /** * Set a timestamp indicating the last modified time of the resource known to the client . The operation will be performed only if the resource on the service has been modified since the specified time .
* @ param ifModifiedSince the ifModifiedSince value to set
* @ return the TaskDeleteOptions object itself . */
public TaskDeleteOptions withIfModifiedSince ( DateTime ifModifiedSince ) { } } | if ( ifModifiedSince == null ) { this . ifModifiedSince = null ; } else { this . ifModifiedSince = new DateTimeRfc1123 ( ifModifiedSince ) ; } return this ; |
public class ns_ns_savedconfig { /** * Use this API to fetch filtered set of ns _ ns _ savedconfig resources .
* filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */
public static ns_ns_savedconfig [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } } | ns_ns_savedconfig obj = new ns_ns_savedconfig ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; ns_ns_savedconfig [ ] response = ( ns_ns_savedconfig [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class JmxClient { /** * Return a set of the various bean ObjectName objects associated with the Jmx server . */
public Set < ObjectName > getBeanNames ( ) throws JMException { } } | checkClientConnected ( ) ; try { return mbeanConn . queryNames ( null , null ) ; } catch ( IOException e ) { throw createJmException ( "Problems querying for jmx bean names: " + e , e ) ; } |
public class Chunk {
    /**
     * Set the element in a chunk as missing (NA) given a 0-based chunk-local index.
     *
     * @param idx 0-based index within this chunk
     * @return always true
     */
    public final boolean setNA0(int idx) {
        setWrite(); // NOTE(review): presumably switches to a writable copy of the chunk — confirm
        // Fast path: the current write-chunk can represent NA at idx in place.
        if (_chk2.setNA_impl(idx)) return true;
        // Slow path: inflate to a NewChunk (which can always hold NA) and set it there.
        (_chk2 = inflate_impl(new NewChunk(this))).setNA_impl(idx);
        return true;
    }
}
public class NotesApi { /** * Gets a Stream of all notes for a single merge request
* < pre > < code > GitLab Endpoint : GET / projects / : id / merge _ requests / : merge _ request _ iid / notes < / code > < / pre >
* @ param projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance
* @ param mergeRequestIid the issue ID to get the notes for
* @ return a Stream of the merge request ' s notes
* @ throws GitLabApiException if any exception occurs */
public Stream < Note > getMergeRequestNotesStream ( Object projectIdOrPath , Integer mergeRequestIid ) throws GitLabApiException { } } | return ( getMergeRequestNotes ( projectIdOrPath , mergeRequestIid , null , null , getDefaultPerPage ( ) ) . stream ( ) ) ; |
public class ReplicationInstanceMarshaller {
    /**
     * Marshalls the given ReplicationInstance, writing each modelled member through
     * its pre-built protocol binding.
     *
     * @param replicationInstance the instance to marshall; must not be null
     * @param protocolMarshaller the marshaller to write to
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ReplicationInstance replicationInstance, ProtocolMarshaller protocolMarshaller) {
        if (replicationInstance == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per member, in model declaration order.
            protocolMarshaller.marshall(replicationInstance.getReplicationInstanceIdentifier(), REPLICATIONINSTANCEIDENTIFIER_BINDING);
            protocolMarshaller.marshall(replicationInstance.getReplicationInstanceClass(), REPLICATIONINSTANCECLASS_BINDING);
            protocolMarshaller.marshall(replicationInstance.getReplicationInstanceStatus(), REPLICATIONINSTANCESTATUS_BINDING);
            protocolMarshaller.marshall(replicationInstance.getAllocatedStorage(), ALLOCATEDSTORAGE_BINDING);
            protocolMarshaller.marshall(replicationInstance.getInstanceCreateTime(), INSTANCECREATETIME_BINDING);
            protocolMarshaller.marshall(replicationInstance.getVpcSecurityGroups(), VPCSECURITYGROUPS_BINDING);
            protocolMarshaller.marshall(replicationInstance.getAvailabilityZone(), AVAILABILITYZONE_BINDING);
            protocolMarshaller.marshall(replicationInstance.getReplicationSubnetGroup(), REPLICATIONSUBNETGROUP_BINDING);
            protocolMarshaller.marshall(replicationInstance.getPreferredMaintenanceWindow(), PREFERREDMAINTENANCEWINDOW_BINDING);
            protocolMarshaller.marshall(replicationInstance.getPendingModifiedValues(), PENDINGMODIFIEDVALUES_BINDING);
            protocolMarshaller.marshall(replicationInstance.getMultiAZ(), MULTIAZ_BINDING);
            protocolMarshaller.marshall(replicationInstance.getEngineVersion(), ENGINEVERSION_BINDING);
            protocolMarshaller.marshall(replicationInstance.getAutoMinorVersionUpgrade(), AUTOMINORVERSIONUPGRADE_BINDING);
            protocolMarshaller.marshall(replicationInstance.getKmsKeyId(), KMSKEYID_BINDING);
            protocolMarshaller.marshall(replicationInstance.getReplicationInstanceArn(), REPLICATIONINSTANCEARN_BINDING);
            protocolMarshaller.marshall(replicationInstance.getReplicationInstancePublicIpAddress(), REPLICATIONINSTANCEPUBLICIPADDRESS_BINDING);
            protocolMarshaller.marshall(replicationInstance.getReplicationInstancePrivateIpAddress(), REPLICATIONINSTANCEPRIVATEIPADDRESS_BINDING);
            protocolMarshaller.marshall(replicationInstance.getReplicationInstancePublicIpAddresses(), REPLICATIONINSTANCEPUBLICIPADDRESSES_BINDING);
            protocolMarshaller.marshall(replicationInstance.getReplicationInstancePrivateIpAddresses(), REPLICATIONINSTANCEPRIVATEIPADDRESSES_BINDING);
            protocolMarshaller.marshall(replicationInstance.getPubliclyAccessible(), PUBLICLYACCESSIBLE_BINDING);
            protocolMarshaller.marshall(replicationInstance.getSecondaryAvailabilityZone(), SECONDARYAVAILABILITYZONE_BINDING);
            protocolMarshaller.marshall(replicationInstance.getFreeUntil(), FREEUNTIL_BINDING);
            protocolMarshaller.marshall(replicationInstance.getDnsNameServers(), DNSNAMESERVERS_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception type, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class TransactionInterceptor { /** * The interception method . The method should call { @ link org . wisdom . api . interception . RequestContext # proceed ( ) }
* to call the next interception . Without this call it cuts the chain .
* @ param configuration the interception configuration
* @ param context the interception context
* @ return the result
* @ throws Exception if anything bad happen */
@ Override public Result call ( Transactional configuration , RequestContext context ) throws Exception { } } | propagation . onEntry ( configuration . propagation ( ) , configuration . timeout ( ) , context . route ( ) . getControllerMethod ( ) . getName ( ) ) ; try { Result result = context . proceed ( ) ; propagation . onExit ( configuration . propagation ( ) , context . route ( ) . getControllerMethod ( ) . getName ( ) , null ) ; return result ; } catch ( Exception e ) { propagation . onError ( e , configuration . propagation ( ) , configuration . noRollbackFor ( ) , configuration . rollbackOnlyFor ( ) , context . route ( ) . getControllerMethod ( ) . getName ( ) , null ) ; throw e ; } |
public class JMElasticsearchIndex { /** * Send data with object mapper string .
* @ param sourceObject the source object
* @ param index the index
* @ param type the type
* @ return the string */
public String sendDataWithObjectMapper ( Object sourceObject , String index , String type ) { } } | return sendDataWithObjectMapper ( sourceObject , index , type , null ) . getId ( ) ; |
import java.util.*;

class RabbitFeed {
    /**
     * Calculates the total number of carrots a rabbit has consumed after feeding,
     * and how many carrots remain in stock afterwards.
     *
     * Examples:
     *   rabbitFeed(5, 6, 10)  -> [11, 4]
     *   rabbitFeed(4, 8, 9)   -> [12, 1]
     *   rabbitFeed(1, 10, 10) -> [11, 0]
     *   rabbitFeed(2, 11, 5)  -> [7, 0]
     *
     * @param consumed the number of carrots the rabbit has already eaten (0..1000)
     * @param required the number of carrots the rabbit still needs to consume (0..1000)
     * @param available the number of available carrots in stock (0..1000)
     * @return a two-element list: [total eaten, carrots left]
     */
    public static List<Integer> rabbitFeed(int consumed, int required, int available) {
        // The rabbit eats what it needs, capped by what is actually in stock.
        final int eaten = Math.min(required, available);
        return Arrays.asList(consumed + eaten, available - eaten);
    }
}
public class Logger { /** * verbose .
* @ param msg the msg
* @ param args the args */
public static void verbose ( String msg , Object ... args ) { } } | String tag = generateTag ( ) ; if ( ! "" . equals ( tag ) ) { if ( args . length > 0 ) { Log . v ( tag , String . format ( msg , args ) ) ; } else { Log . v ( tag , msg ) ; } } |
public class Compiler {
    /**
     * Execute the XPath object from a given opcode position.
     *
     * @param opPos The current position in the xpath.m_opMap array.
     * @return The result of the XPath.
     * @throws TransformerException if there is a syntax or other error.
     * @xsl.usage advanced
     */
    public Expression compile(int opPos) throws TransformerException {
        int op = getOp(opPos);
        Expression expr = null;
        // System.out.println(getPatternString() + " op: " + op);
        // Dispatch on the opcode, delegating to the per-operator compile helpers.
        switch (op) {
            // OP_XPATH wraps the real expression two map slots further in.
            case OpCodes.OP_XPATH: expr = compile(opPos + 2); break;
            case OpCodes.OP_OR: expr = or(opPos); break;
            case OpCodes.OP_AND: expr = and(opPos); break;
            case OpCodes.OP_NOTEQUALS: expr = notequals(opPos); break;
            case OpCodes.OP_EQUALS: expr = equals(opPos); break;
            case OpCodes.OP_LTE: expr = lte(opPos); break;
            case OpCodes.OP_LT: expr = lt(opPos); break;
            case OpCodes.OP_GTE: expr = gte(opPos); break;
            case OpCodes.OP_GT: expr = gt(opPos); break;
            case OpCodes.OP_PLUS: expr = plus(opPos); break;
            case OpCodes.OP_MINUS: expr = minus(opPos); break;
            case OpCodes.OP_MULT: expr = mult(opPos); break;
            case OpCodes.OP_DIV: expr = div(opPos); break;
            case OpCodes.OP_MOD: expr = mod(opPos); break;
            // case OpCodes.OP_QUO:
            //   expr = quo(opPos); break;
            case OpCodes.OP_NEG: expr = neg(opPos); break;
            case OpCodes.OP_STRING: expr = string(opPos); break;
            case OpCodes.OP_BOOL: expr = bool(opPos); break;
            case OpCodes.OP_NUMBER: expr = number(opPos); break;
            case OpCodes.OP_UNION: expr = union(opPos); break;
            case OpCodes.OP_LITERAL: expr = literal(opPos); break;
            case OpCodes.OP_VARIABLE: expr = variable(opPos); break;
            case OpCodes.OP_GROUP: expr = group(opPos); break;
            case OpCodes.OP_NUMBERLIT: expr = numberlit(opPos); break;
            case OpCodes.OP_ARGUMENT: expr = arg(opPos); break;
            case OpCodes.OP_EXTFUNCTION: expr = compileExtension(opPos); break;
            case OpCodes.OP_FUNCTION: expr = compileFunction(opPos); break;
            case OpCodes.OP_LOCATIONPATH: expr = locationPath(opPos); break;
            case OpCodes.OP_PREDICATE: expr = null; break; // should never hit this here.
            case OpCodes.OP_MATCHPATTERN: expr = matchPattern(opPos + 2); break;
            case OpCodes.OP_LOCATIONPATHPATTERN: expr = locationPathPattern(opPos); break;
            case OpCodes.OP_QUO:
                error(XPATHErrorResources.ER_UNKNOWN_OPCODE, new Object[]{ "quo" }); // "ERROR! Unknown op code: " + m_opMap[opPos]);
                break;
            default:
                error(XPATHErrorResources.ER_UNKNOWN_OPCODE,
                        new Object[]{ Integer.toString(getOp(opPos)) }); // "ERROR! Unknown op code: " + m_opMap[opPos]);
        }
        // if (null != expr)
        //   expr.setSourceLocator(m_locator);
        return expr;
    }
}
public class Log { /** * Send a { @ link # VERBOSE } log message .
* @ param msg
* The message you would like logged . */
public static int v ( String msg ) { } } | // This is a quick check to avoid the expensive stack trace reflection .
if ( ! activated ) { return 0 ; } String caller = LogHelper . getCaller ( ) ; if ( caller != null ) { return v ( caller , msg ) ; } return 0 ; |
public class ClassReader { /** * Read name . */
Name readName ( int i ) { } } | Object obj = readPool ( i ) ; if ( obj != null && ! ( obj instanceof Name ) ) throw badClassFile ( "bad.const.pool.entry" , currentClassFile . toString ( ) , "CONSTANT_Utf8_info or CONSTANT_String_info" , i ) ; return ( Name ) obj ; |
public class ValidatingStreamReader {
    /**
     * Handles the remainder of the DOCTYPE declaration, essentially the optional
     * internal subset. The internal subset, if one exists, is always read, but whether
     * its contents are added to the read buffer depends on the passed-in argument.
     * NOTE: since this method overrides the default implementation, the method
     * signature must NOT be changed.
     *
     * @param copyContents if true, copies the contents of the internal subset of the
     *        DOCTYPE declaration into the text buffer (in addition to parsing it for
     *        actual use); if false, only parses
     */
    @Override
    protected void finishDTD(boolean copyContents) throws XMLStreamException {
        if (!hasConfigFlags(CFG_SUPPORT_DTD)) {
            // DTD support disabled: fall back to the non-validating base behavior.
            super.finishDTD(copyContents);
            return;
        }
        /* We know there are no spaces, as this char was read and pushed back earlier... */
        char c = getNextChar(SUFFIX_IN_DTD);
        DTDSubset intSubset = null;
        /* Do we have an internal subset? We checked earlier that it has to be
         * either '[' or the closing '>'. */
        if (c == '[') {
            // Do we need to copy the contents of the internal subset into the buffer?
            if (copyContents) {
                ((BranchingReaderSource) mInput).startBranch(mTextBuffer, mInputPtr, mNormalizeLFs);
            }
            try {
                intSubset = FullDTDReader.readInternalSubset(this, mInput, mConfig,
                        hasConfigFlags(CFG_VALIDATE_AGAINST_DTD), mDocXmlVersion);
            } finally {
                /* Close branching in any and every case (may allow graceful
                 * recovery in error cases in the future). */
                if (copyContents) {
                    /* Need to "push back" the ']' read in the successful case (that is
                     * the -1 below); in the error case it is whatever the last char was. */
                    ((BranchingReaderSource) mInput).endBranch(mInputPtr - 1);
                }
            }
            // And then we need the closing '>'
            c = getNextCharAfterWS(SUFFIX_IN_DTD_INTERNAL);
        }
        if (c != '>') {
            throwUnexpectedChar(c, "; expected '>' to finish DOCTYPE declaration.");
        }
        /* We may also need to read the external subset, if one was defined --
         * but not if a DTD override is set. */
        mDTD = mConfig.getDTDOverride();
        if (mDTD != null) {
            // We have an earlier override that is already parsed.
        } else {
            // Nope, no override.
            DTDSubset extSubset = null;
            /* If standalone was specified as "yes" we arguably should skip the external
             * subset; but SAX (Xerces) still reads it, so we follow that example. */
            // if (mDocStandalone != DOC_STANDALONE_YES) {
            if (true) {
                if (mDtdPublicId != null || mDtdSystemId != null) {
                    extSubset = findDtdExtSubset(mDtdPublicId, mDtdSystemId, intSubset);
                }
            }
            if (intSubset == null) {
                mDTD = extSubset;
            } else if (extSubset == null) {
                mDTD = intSubset;
            } else {
                mDTD = intSubset.combineWithExternalSubset(this, extSubset);
            }
        }
        if (mDTD == null) { // only if specifically overridden not to have any
            mGeneralEntities = null;
        } else {
            if (mDTD instanceof DTDSubset) {
                mGeneralEntities = ((DTDSubset) mDTD).getGeneralEntityMap();
            } else {
                /* Warn if using a non-native DTD implementation, since entities
                 * and notations can not be accessed. */
                _reportProblem(mConfig.getXMLReporter(), ErrorConsts.WT_DT_DECL,
                        "Value to set for property '" + XMLInputFactory2.P_DTD_OVERRIDE
                        + "' not a native Woodstox DTD implementation (but "
                        + mDTD.getClass() + "): can not access full entity or notation information",
                        null);
            }
            /* There is both a fully-validating mode and a non-validating-but-DTD-aware
             * mode. In the latter case we still need a validator, but just to get type
             * info and to add attribute default values if necessary. */
            mAutoDtdValidator = mDTD.createValidator(/* (ValidationContext) */ mElementStack);
            mDtdValidatorSet = true; // so we won't get nags
            NsDefaultProvider nsDefs = null;
            if (mAutoDtdValidator instanceof DTDValidatorBase) {
                DTDValidatorBase dtdv = (DTDValidatorBase) mAutoDtdValidator;
                dtdv.setAttrValueNormalization(true);
                // Do we have any attribute defaults for 'xmlns' or 'xmlns:*'?
                if (dtdv.hasNsDefaults()) {
                    nsDefs = dtdv;
                }
            }
            mElementStack.setAutomaticDTDValidator(mAutoDtdValidator, nsDefs);
        }
    }
}
public class StrBuilder { /** * If possible , reads chars from the provided { @ link Readable } directly into underlying
* character buffer without making extra copies .
* @ param readable object to read from
* @ return the number of characters read
* @ throws IOException if an I / O error occurs
* @ since 3.4
* @ see # appendTo ( Appendable ) */
@ GwtIncompatible ( "incompatible method" ) public int readFrom ( final Readable readable ) throws IOException { } } | final int oldSize = size ; if ( readable instanceof Reader ) { final Reader r = ( Reader ) readable ; ensureCapacity ( size + 1 ) ; int read ; while ( ( read = r . read ( buffer , size , buffer . length - size ) ) != - 1 ) { size += read ; ensureCapacity ( size + 1 ) ; } } else if ( readable instanceof CharBuffer ) { final CharBuffer cb = ( CharBuffer ) readable ; final int remaining = cb . remaining ( ) ; ensureCapacity ( size + remaining ) ; cb . get ( buffer , size , remaining ) ; size += remaining ; } else { while ( true ) { ensureCapacity ( size + 1 ) ; final CharBuffer buf = CharBuffer . wrap ( buffer , size , buffer . length - size ) ; final int read = readable . read ( buf ) ; if ( read == - 1 ) { break ; } size += read ; } } return size - oldSize ; |
public class AbstractRequestContext { /** * Make an EntityProxy immutable . */
private void makeImmutable ( final AutoBean < ? extends BaseProxy > toMutate ) { } } | // Always diff ' ed against itself , producing a no - op
toMutate . setTag ( Constants . PARENT_OBJECT , toMutate ) ; // Act with entity - identity semantics
toMutate . setTag ( REQUEST_CONTEXT_STATE , null ) ; toMutate . setFrozen ( true ) ; |
public class GlobalUsersInner { /** * Starts an environment by starting all resources inside the environment . This operation can take a while to complete .
* @ param userName The name of the user .
* @ param environmentId The resourceId of the environment
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponse } object if successful . */
public Observable < ServiceResponse < Void > > beginStartEnvironmentWithServiceResponseAsync ( String userName , String environmentId ) { } } | if ( userName == null ) { throw new IllegalArgumentException ( "Parameter userName is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } if ( environmentId == null ) { throw new IllegalArgumentException ( "Parameter environmentId is required and cannot be null." ) ; } EnvironmentOperationsPayload environmentOperationsPayload = new EnvironmentOperationsPayload ( ) ; environmentOperationsPayload . withEnvironmentId ( environmentId ) ; return service . beginStartEnvironment ( userName , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , environmentOperationsPayload , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Void > > > ( ) { @ Override public Observable < ServiceResponse < Void > > call ( Response < ResponseBody > response ) { try { ServiceResponse < Void > clientResponse = beginStartEnvironmentDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ; |
public class HourRanges { /** * Returns a compressed version of the
* @ return */
public final HourRanges compress ( ) { } } | final List < HourRanges > normalized = normalize ( ) ; if ( normalized . size ( ) == 1 ) { return valueOf ( normalized . get ( 0 ) . toMinutes ( ) ) ; } else if ( normalized . size ( ) == 2 ) { final HourRanges firstDay = valueOf ( normalized . get ( 0 ) . toMinutes ( ) ) ; final HourRanges secondDay = normalized . get ( 1 ) ; if ( secondDay . ranges . size ( ) != 1 ) { throw new IllegalStateException ( "Expected exactly 1 hour range for the seond day, but was " + secondDay . ranges . size ( ) + ": " + secondDay ) ; } final HourRange seondDayHR = secondDay . ranges . get ( 0 ) ; final List < HourRange > newRanges = new ArrayList < > ( ) ; int lastIdx = 0 ; if ( firstDay . ranges . size ( ) > 1 ) { lastIdx = firstDay . ranges . size ( ) - 1 ; newRanges . addAll ( firstDay . ranges . subList ( 0 , lastIdx ) ) ; } final HourRange firstDayHR = firstDay . ranges . get ( lastIdx ) ; newRanges . add ( firstDayHR . joinWithNextDay ( seondDayHR ) ) ; return new HourRanges ( newRanges . toArray ( new HourRange [ newRanges . size ( ) ] ) ) ; } else { throw new IllegalStateException ( "Normalized hour ranges returned an unexpected number of elements (" + normalized . size ( ) + "): " + normalized ) ; } |
public class ZipUtil { /** * Unpacks a ZIP file to the given directory using a specific Charset
* for the input file .
* The output directory must not be a file .
* @ param zip
* input ZIP file .
* @ param outputDir
* output directory ( created automatically if not found ) .
* @ param charset
* charset used to unpack the zip file */
public static void unpack ( File zip , final File outputDir , Charset charset ) { } } | unpack ( zip , outputDir , IdentityNameMapper . INSTANCE , charset ) ; |
public class Serializer { /** * deserializes DBBinary / byte [ ] to object
* @ param data the data to read
* @ param zipped true if the data is compressed
* @ return the deserialized object
* @ throws IOException thrown when an error is encountered reading the data
* @ throws ClassNotFoundException thrown if the Class definition can not be found */
public static Object deserialize ( final Object data , final boolean zipped ) throws IOException , ClassNotFoundException { } } | final ByteArrayInputStream bais ; if ( data instanceof Binary ) { bais = new ByteArrayInputStream ( ( ( Binary ) data ) . getData ( ) ) ; } else { bais = new ByteArrayInputStream ( ( byte [ ] ) data ) ; } InputStream is = bais ; try { if ( zipped ) { is = new GZIPInputStream ( is ) ; } final ObjectInputStream ois = new ObjectInputStream ( is ) ; return ois . readObject ( ) ; } finally { is . close ( ) ; } |
public class Comparables { /** * Safely compares two values that might be null . Null value is considered lower than non - null , even if the
* non - null value is minimal in its range .
* @ return comparison result of first and second value . */
public static < Value extends Comparable < Value > > int nullSafeCompare ( final Value first , final Value second ) { } } | if ( first == null ) { return second == null ? EQUAL_COMPARE_RESULT : LOWER_THAN_COMPARE_RESULT ; } return second == null ? GREATER_THAN_COMPARE_RESULT : first . compareTo ( second ) ; |
public class SVGBuilder { /** * Parse SVG data from an Android application asset .
* @ param assetMngr the Android asset manager .
* @ param svgPath the path to the SVG file in the application ' s assets .
* @ throws IOException if there was a problem reading the file . */
public SVGBuilder readFromAsset ( AssetManager assetMngr , String svgPath ) throws IOException { } } | this . data = assetMngr . open ( svgPath ) ; return this ; |
public class Packer { /** * Add fill = VERTICAL , weighty = wty to the constraints for the current
* component . */
public Packer filly ( final double wty ) { } } | gc . fill = GridBagConstraints . VERTICAL ; gc . weighty = wty ; setConstraints ( comp , gc ) ; return this ; |
public class SQLiteModelMethod { /** * Gets the adapter for param .
* @ param paramName
* the param name
* @ return the adapter for param */
public TypeName getAdapterForParam ( String paramName ) { } } | if ( this . hasAdapterForParam ( paramName ) ) { return TypeUtility . typeName ( this . parameterName2Adapter . get ( paramName ) ) ; } else { return null ; } |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link MeasureAttributeType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link MeasureAttributeType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/generics/2.0" , name = "measureAttribute" , substitutionHeadNamespace = "http://www.opengis.net/citygml/generics/2.0" , substitutionHeadName = "_genericAttribute" ) public JAXBElement < MeasureAttributeType > createMeasureAttribute ( MeasureAttributeType value ) { } } | return new JAXBElement < MeasureAttributeType > ( _MeasureAttribute_QNAME , MeasureAttributeType . class , null , value ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.