signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ElementMatchers { /** * Matches { @ link MethodDescription } s that return a given erasure type . * @ param type The raw type the matched method is expected to return . * @ param < T > The type of the matched object . * @ return An element matcher that matches a given return type for a method description . */ public static < T extends MethodDescription > ElementMatcher . Junction < T > returns ( Class < ? > type ) { } }
return returnsGeneric ( erasure ( type ) ) ;
public class CmsEditModuleForm { /** * Adds a new resource selection widget to the list of module resources . < p > * @ param moduleResource the initial value for the new widget */ void addExcludedResource ( String moduleResource ) { } }
CmsModuleResourceSelectField resField = createModuleResourceField ( moduleResource ) ; if ( resField != null ) { m_excludedResourcesGroup . addRow ( resField ) ; }
public class HeaderMap { /** * contains */ public boolean contains ( HttpString headerName ) { } }
final HeaderValues headerValues = getEntry ( headerName ) ; if ( headerValues == null ) { return false ; } final Object v = headerValues . value ; if ( v instanceof String ) { return true ; } final String [ ] list = ( String [ ] ) v ; for ( int i = 0 ; i < list . length ; i ++ ) { if ( list [ i ] != null ) { return true ; } } return false ;
public class Matrix4f { /** * Compute a normal matrix from the upper left 3x3 submatrix of < code > this < / code > * and store it into the upper left 3x3 submatrix of < code > dest < / code > . * All other values of < code > dest < / code > will be set to { @ link # identity ( ) identity } . * The normal matrix of < code > m < / code > is the transpose of the inverse of < code > m < / code > . * Please note that , if < code > this < / code > is an orthogonal matrix or a matrix whose columns are orthogonal vectors , * then this method < i > need not < / i > be invoked , since in that case < code > this < / code > itself is its normal matrix . * In that case , use { @ link # set3x3 ( Matrix4f ) } to set a given Matrix4f to only the upper left 3x3 submatrix * of this matrix . * @ see # set3x3 ( Matrix4f ) * @ param dest * will hold the result * @ return dest */ public Matrix4f normal ( Matrix4f dest ) { } }
if ( ( properties & PROPERTY_IDENTITY ) != 0 ) return dest . identity ( ) ; else if ( ( properties & PROPERTY_ORTHONORMAL ) != 0 ) return normalOrthonormal ( dest ) ; return normalGeneric ( dest ) ;
public class GeometryRendererImpl { /** * Used in creating the " edges " , " selection " and " vertices " groups for LineStrings and LinearRings . */ private Composite getOrCreateGroup ( Object parent , String name ) { } }
if ( groups . containsKey ( name ) ) { return groups . get ( name ) ; } Composite group = new Composite ( name ) ; mapWidget . getVectorContext ( ) . drawGroup ( parent , group ) ; groups . put ( name , group ) ; return group ;
public class InMemoryCacheEntry { /** * / * ( non - Javadoc ) * @ see com . gistlabs . mechanize . cache . InMemoryCacheEntry # prepareConditionalGet ( org . apache . http . client . methods . HttpUriRequest ) */ @ Override public void prepareConditionalGet ( final HttpUriRequest newRequest ) { } }
transferFirstHeader ( "ETag" , "If-None-Match" , this . response , newRequest ) ; transferFirstHeader ( "Last-Modified" , "If-Modified-Since" , this . response , newRequest ) ;
public class Settings { /** * Get a property by name * @ param key the property name */ public int getIntProperty ( String key , int defaultValue ) { } }
String value = SystemTools . replaceSystemProperties ( settings . getProperty ( key ) ) ; if ( value == null ) return defaultValue ; try { return Integer . parseInt ( value ) ; } catch ( Exception e ) { return defaultValue ; }
public class NotificationPacket { /** * / * ( non - Javadoc ) * @ see net . timewalker . ffmq4 . network . packet . AbstractPacket # unserializeFrom ( net . timewalker . ffmq4 . utils . RawDataInputStream ) */ @ Override protected void unserializeFrom ( RawDataBuffer in ) { } }
super . unserializeFrom ( in ) ; sessionId = new IntegerID ( in . readInt ( ) ) ; consumerId = new IntegerID ( in . readInt ( ) ) ; message = MessageSerializer . unserializeFrom ( in , false ) ; donePrefetching = in . readBoolean ( ) ;
public class CommentUtil { /** * Returns the first comment found for the given docx object . Note that an object is * only considered commented if the comment STARTS within the object . Comments * spanning several objects are not supported by this method . * @ param object the object whose comment to load . * @ param document the document in which the object is embedded ( needed to load the * comment from the comments . xml part ) . * @ return the concatenated string of all paragraphs of text within the comment or * null if the specified object is not commented . * @ throws Docx4JException in case of a Docx4J processing error . */ public static Comments . Comment getCommentFor ( ContentAccessor object , WordprocessingMLPackage document ) { } }
try { for ( Object contentObject : object . getContent ( ) ) { if ( contentObject instanceof CommentRangeStart ) { try { BigInteger id = ( ( CommentRangeStart ) contentObject ) . getId ( ) ; CommentsPart commentsPart = ( CommentsPart ) document . getParts ( ) . get ( new PartName ( "/word/comments.xml" ) ) ; Comments comments = commentsPart . getContents ( ) ; for ( Comments . Comment comment : comments . getComment ( ) ) { if ( comment . getId ( ) . equals ( id ) ) { return comment ; } } } catch ( InvalidFormatException e ) { logger . warn ( String . format ( "Error while searching comment. Skipping object %s." , object ) , e ) ; } } } return null ; } catch ( Docx4JException e ) { throw new DocxStamperException ( "error accessing the comments of the document!" , e ) ; }
public class AnnotationFunctionImportFactory { /** * Add function import to builder of this factory by specified class . * @ param cls function import class */ public void addFunctionImport ( Class < ? > cls ) { } }
EdmFunctionImport functionImportAnnotation = cls . getAnnotation ( EdmFunctionImport . class ) ; FunctionImportImpl . Builder functionImportBuilder = new FunctionImportImpl . Builder ( ) . setEntitySetName ( functionImportAnnotation . entitySet ( ) ) . setFunctionName ( functionImportAnnotation . namespace ( ) + "." + functionImportAnnotation . function ( ) ) . setIncludeInServiceDocument ( functionImportAnnotation . includeInServiceDocument ( ) ) . setName ( functionImportAnnotation . name ( ) ) . setJavaClass ( cls ) ; functionImportBuilders . add ( functionImportBuilder ) ;
public class PGPRGImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setSHside ( Integer newSHside ) { } }
Integer oldSHside = sHside ; sHside = newSHside ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . PGPRG__SHSIDE , oldSHside , sHside ) ) ;
public class JcValue { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > return the receiver as a JcString < / i > < / div > * < br / > */ public JcString asString ( ) { } }
JcString ret = new JcString ( null , this , null ) ; QueryRecorder . recordInvocationConditional ( this , "asString" , ret ) ; return ret ;
public class ArrayList { /** * Removes from this list all of the elements whose index is between * { @ code fromIndex } , inclusive , and { @ code toIndex } , exclusive . * Shifts any succeeding elements to the left ( reduces their index ) . * This call shortens the list by { @ code ( toIndex - fromIndex ) } elements . * ( If { @ code toIndex = = fromIndex } , this operation has no effect . ) * @ throws IndexOutOfBoundsException if { @ code fromIndex } or * { @ code toIndex } is out of range * ( { @ code fromIndex < 0 | | * fromIndex > = size ( ) | | * toIndex > size ( ) | | * toIndex < fromIndex } ) */ protected void removeRange ( int fromIndex , int toIndex ) { } }
// Android - changed : Throw an IOOBE if toIndex < fromIndex as documented . // All the other cases ( negative indices , or indices greater than the size // will be thrown by System # arrayCopy . if ( toIndex < fromIndex ) { throw new IndexOutOfBoundsException ( "toIndex < fromIndex" ) ; } modCount ++ ; int numMoved = size - toIndex ; System . arraycopy ( elementData , toIndex , elementData , fromIndex , numMoved ) ; // clear to let GC do its work int newSize = size - ( toIndex - fromIndex ) ; for ( int i = newSize ; i < size ; i ++ ) { elementData [ i ] = null ; } size = newSize ;
public class IOUtils { /** * Serializes the given object value to an output stream , and close the output stream . * @ param value object value to serialize * @ param outputStream output stream to serialize into * @ since 1.16 */ public static void serialize ( Object value , OutputStream outputStream ) throws IOException { } }
try { new ObjectOutputStream ( outputStream ) . writeObject ( value ) ; } finally { outputStream . close ( ) ; }
public class OAuthCredentialsCache { /** * Refreshes the OAuth2 token asynchronously . This method will only start an async refresh if * there isn ' t a currently running asynchronous refresh and the current token is not " Good " . */ Future < HeaderCacheElement > asyncRefresh ( ) { } }
LOG . trace ( "asyncRefresh" ) ; synchronized ( lock ) { try { if ( futureToken != null ) { return futureToken ; } if ( headerCache . getCacheState ( ) == CacheState . Good ) { return Futures . immediateFuture ( headerCache ) ; } Future < HeaderCacheElement > future = executor . submit ( new Callable < HeaderCacheElement > ( ) { @ Override public HeaderCacheElement call ( ) { return updateToken ( ) ; } } ) ; if ( ! future . isDone ( ) ) { this . futureToken = future ; } return future ; } catch ( RuntimeException e ) { futureToken = null ; LOG . warn ( "Got an unexpected exception while trying to refresh google credentials." , e ) ; return Futures . immediateFuture ( new HeaderCacheElement ( Status . UNAUTHENTICATED . withDescription ( "Unexpected error trying to authenticate" ) . withCause ( e ) ) ) ; } }
public class ResourceChangeMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ResourceChange resourceChange , ProtocolMarshaller protocolMarshaller ) { } }
if ( resourceChange == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( resourceChange . getAction ( ) , ACTION_BINDING ) ; protocolMarshaller . marshall ( resourceChange . getLogicalResourceId ( ) , LOGICALRESOURCEID_BINDING ) ; protocolMarshaller . marshall ( resourceChange . getPhysicalResourceId ( ) , PHYSICALRESOURCEID_BINDING ) ; protocolMarshaller . marshall ( resourceChange . getResourceType ( ) , RESOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( resourceChange . getReplacement ( ) , REPLACEMENT_BINDING ) ; protocolMarshaller . marshall ( resourceChange . getScope ( ) , SCOPE_BINDING ) ; protocolMarshaller . marshall ( resourceChange . getDetails ( ) , DETAILS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ParameterMap { /** * Set the given parameters under . * @ param params the other parameter map */ public void setAll ( Map < String , String > params ) { } }
for ( Map . Entry < String , String > entry : params . entrySet ( ) ) { setParameter ( entry . getKey ( ) , entry . getValue ( ) ) ; }
public class Train { /** * save feature index */ private void saveFeatureIndexes ( FeatureIndex [ ] index , ZipModel model ) throws IOException { } }
logger . info ( "save feature index (" + index . length + ")" ) ; // save the indexes for ( int i = 0 ; i < index . length ; i ++ ) { logger . debug ( "dic" + i + " size " + index [ i ] . size ( ) ) ; File tmp = File . createTempFile ( "dic" + i , null ) ; tmp . deleteOnExit ( ) ; BufferedWriter bwd = new BufferedWriter ( new FileWriter ( tmp ) ) ; index [ i ] . write ( bwd ) ; bwd . close ( ) ; // add the dictionary to the model model . add ( tmp , "dic" + i ) ; } // end for
public class GenUtil { /** * Return an " @ Generated " annotation . * @ param clazz the class doing the code generation , NOT the generation target . * @ param indent the number of spaces that the annotation is indented . * @ param includeDate include the date ? * @ param comments joined by a space and used as explanatory comments . */ public static String getGeneratedAnnotation ( Class < ? > clazz , int indent , boolean includeDate , String ... comments ) { } }
final int LINE_LENGTH = 100 ; String comm = StringUtil . join ( comments , " " ) ; boolean hasComment = ! StringUtil . isBlank ( comm ) ; String anno = "@Generated(value={\"" + clazz . getName ( ) + "\"}" ; if ( includeDate ) { anno += "," ; // ISO 8601 date String date = " date=\"" + new SimpleDateFormat ( "yyyy-MM-dd'T'HH:mm:ssZ" ) . format ( new Date ( ) ) + "\"" ; // wrap the date onto a new line if it ' s going to be too long , or if we ' re // adding a comment ( which is also on a new line ) if ( hasComment || anno . length ( ) + date . length ( ) + 1 + indent > LINE_LENGTH ) { // put the date on a new line , space it right date = "\n" + StringUtil . fill ( ' ' , indent + 10 ) + date ; } anno += date ; } if ( hasComment ) { anno += ",\n" + StringUtil . fill ( ' ' , indent + 11 ) + "comments=\"" + comm + "\"" ; } anno += ")" ; return anno ;
public class AbstractObjectStore { /** * ( non - Javadoc ) * @ see com . ibm . ws . objectManager . ObjectStore # captureStatistics ( ) */ public java . util . Map captureStatistics ( ) throws ObjectManagerException { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "captureStatistics" ) ; java . util . Map statistics = new java . util . HashMap ( ) ; // inMemoryTokens set to null after close ( ) . if ( inMemoryTokens != null ) statistics . put ( "inMemoryTokens.size()" , Integer . toString ( inMemoryTokens . size ( ) ) ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "captureStatistics" , statistics ) ; return statistics ;
public class WstxInputFactory { /** * Another internal factory method , used when dealing with a generic * Source base type . One thing worth noting is that ' auto - closing ' * will be enabled if the input source or Reader is constructed ( and * thus owned ) by Woodstox . * @ param forER True , if the reader is being constructed to be used * by an event reader ; false if it is not ( or the purpose is not known ) */ @ SuppressWarnings ( "resource" ) protected XMLStreamReader2 createSR ( javax . xml . transform . Source src , boolean forER ) throws XMLStreamException { } }
ReaderConfig cfg = createPrivateConfig ( ) ; Reader r = null ; InputStream in = null ; String pubId = null ; String sysId = null ; String encoding = null ; boolean autoCloseInput ; InputBootstrapper bs = null ; if ( src instanceof Stax2Source ) { Stax2Source ss = ( Stax2Source ) src ; sysId = ss . getSystemId ( ) ; pubId = ss . getPublicId ( ) ; encoding = ss . getEncoding ( ) ; try { /* 11 - Nov - 2008 , TSa : Let ' s add optimized handling for byte - block * source */ if ( src instanceof Stax2ByteArraySource ) { Stax2ByteArraySource bas = ( Stax2ByteArraySource ) src ; bs = StreamBootstrapper . getInstance ( pubId , SystemId . construct ( sysId ) , bas . getBuffer ( ) , bas . getBufferStart ( ) , bas . getBufferEnd ( ) ) ; } else { in = ss . constructInputStream ( ) ; if ( in == null ) { r = ss . constructReader ( ) ; } } } catch ( IOException ioe ) { throw new WstxIOException ( ioe ) ; } /* Caller has no direct access to stream / reader , Woodstox * owns it and thus has to close too */ autoCloseInput = true ; } else if ( src instanceof StreamSource ) { StreamSource ss = ( StreamSource ) src ; sysId = ss . getSystemId ( ) ; pubId = ss . getPublicId ( ) ; in = ss . getInputStream ( ) ; if ( in == null ) { r = ss . getReader ( ) ; } /* Caller still has access to stream / reader ; no need to * force auto - close - input */ autoCloseInput = cfg . willAutoCloseInput ( ) ; } else if ( src instanceof SAXSource ) { SAXSource ss = ( SAXSource ) src ; /* 28 - Jan - 2006 , TSa : Not a complete implementation , but maybe * even this might help . . . */ sysId = ss . getSystemId ( ) ; InputSource isrc = ss . getInputSource ( ) ; if ( isrc != null ) { encoding = isrc . getEncoding ( ) ; in = isrc . getByteStream ( ) ; if ( in == null ) { r = isrc . getCharacterStream ( ) ; } } /* Caller still has access to stream / reader ; no need to * force auto - close - input */ autoCloseInput = cfg . 
willAutoCloseInput ( ) ; } else if ( src instanceof DOMSource ) { DOMSource domSrc = ( DOMSource ) src ; // SymbolTable not used by the DOM - based ' reader ' : return WstxDOMWrappingReader . createFrom ( domSrc , cfg ) ; } else { throw new IllegalArgumentException ( "Can not instantiate Stax reader for XML source type " + src . getClass ( ) + " (unrecognized type)" ) ; } if ( bs == null ) { // may have already created boostrapper . . . if ( r != null ) { bs = ReaderBootstrapper . getInstance ( pubId , SystemId . construct ( sysId ) , r , encoding ) ; } else if ( in != null ) { bs = StreamBootstrapper . getInstance ( pubId , SystemId . construct ( sysId ) , in ) ; } else if ( sysId != null && sysId . length ( ) > 0 ) { /* 26 - Dec - 2008 , TSa : If we must construct URL from system id , * it means caller will not have access to resulting * stream , thus we will force auto - closing . */ autoCloseInput = true ; try { return createSR ( cfg , URLUtil . urlFromSystemId ( sysId ) , forER , autoCloseInput ) ; } catch ( IOException ioe ) { throw new WstxIOException ( ioe ) ; } } else { throw new XMLStreamException ( "Can not create Stax reader for the Source passed -- neither reader, input stream nor system id was accessible; can not use other types of sources (like embedded SAX streams)" ) ; } } return createSR ( cfg , sysId , bs , forER , autoCloseInput ) ;
public class Math { /** * Element - wise subtraction of two arrays y = y - x . * @ param y minuend matrix * @ param x subtrahend matrix */ public static void minus ( double [ ] y , double [ ] x ) { } }
if ( x . length != y . length ) { throw new IllegalArgumentException ( String . format ( "Arrays have different length: x[%d], y[%d]" , x . length , y . length ) ) ; } for ( int i = 0 ; i < x . length ; i ++ ) { y [ i ] -= x [ i ] ; }
public class JConsole { /** * If not in the event thread run via SwingUtilities . invokeAndWait ( ) */ private void invokeAndWait ( Runnable run ) { } }
if ( ! SwingUtilities . isEventDispatchThread ( ) ) { try { SwingUtilities . invokeAndWait ( run ) ; } catch ( Exception e ) { // shouldn ' t happen e . printStackTrace ( ) ; } } else { run . run ( ) ; }
public class Promises { /** * Transforms an { @ link AsyncSupplier } { @ code tasks } * to a collection of { @ code Promise } s . */ @ SafeVarargs public static < T > Iterator < Promise < T > > asPromises ( @ NotNull AsyncSupplier < ? extends T > ... tasks ) { } }
return asPromises ( asList ( tasks ) ) ;
public class MPXReader { /** * Populates a task instance . * @ param record MPX record * @ param task task instance * @ throws MPXJException */ private void populateTask ( Record record , Task task ) throws MPXJException { } }
String falseText = LocaleData . getString ( m_locale , LocaleData . NO ) ; int mpxFieldID = 0 ; String field ; int i = 0 ; int length = record . getLength ( ) ; int [ ] model = m_taskModel . getModel ( ) ; while ( i < length ) { mpxFieldID = model [ i ] ; if ( mpxFieldID == - 1 ) { break ; } field = record . getString ( i ++ ) ; if ( ( field == null ) || ( field . length ( ) == 0 ) ) { continue ; } TaskField taskField = MPXTaskField . getMpxjField ( mpxFieldID ) ; if ( taskField == null ) { System . out . println ( "Null Task Field " + mpxFieldID ) ; continue ; } switch ( taskField ) { case PREDECESSORS : case UNIQUE_ID_PREDECESSORS : { populateRelationList ( task , taskField , field ) ; break ; } case PERCENT_COMPLETE : case PERCENT_WORK_COMPLETE : { try { task . set ( taskField , m_formats . getPercentageDecimalFormat ( ) . parse ( field ) ) ; } catch ( ParseException ex ) { throw new MPXJException ( "Failed to parse percentage" , ex ) ; } break ; } case ACTUAL_COST : case BASELINE_COST : case BCWP : case BCWS : case COST : case COST1 : case COST2 : case COST3 : case COST_VARIANCE : case CV : case FIXED_COST : case REMAINING_COST : case SV : { try { task . set ( taskField , m_formats . getCurrencyFormat ( ) . parse ( field ) ) ; } catch ( ParseException ex ) { throw new MPXJException ( "Failed to parse currency" , ex ) ; } break ; } case ACTUAL_DURATION : case ACTUAL_WORK : case BASELINE_DURATION : case BASELINE_WORK : case DURATION : case DURATION1 : case DURATION2 : case DURATION3 : case DURATION_VARIANCE : case FINISH_VARIANCE : case FREE_SLACK : case REMAINING_DURATION : case REMAINING_WORK : case START_VARIANCE : case TOTAL_SLACK : case WORK : case WORK_VARIANCE : case LEVELING_DELAY : { task . set ( taskField , DurationUtility . getInstance ( field , m_formats . 
getDurationDecimalFormat ( ) , m_locale ) ) ; break ; } case ACTUAL_FINISH : case ACTUAL_START : case BASELINE_FINISH : case BASELINE_START : case CONSTRAINT_DATE : case CREATED : case EARLY_FINISH : case EARLY_START : case FINISH : case FINISH1 : case FINISH2 : case FINISH3 : case FINISH4 : case FINISH5 : case LATE_FINISH : case LATE_START : case RESUME : case START : case START1 : case START2 : case START3 : case START4 : case START5 : case STOP : { try { task . set ( taskField , m_formats . getDateTimeFormat ( ) . parse ( field ) ) ; } catch ( ParseException ex ) { throw new MPXJException ( "Failed to parse date time" , ex ) ; } break ; } case CONFIRMED : case CRITICAL : case FLAG1 : case FLAG2 : case FLAG3 : case FLAG4 : case FLAG5 : case FLAG6 : case FLAG7 : case FLAG8 : case FLAG9 : case FLAG10 : case HIDE_BAR : case LINKED_FIELDS : case MARKED : case MILESTONE : case ROLLUP : case SUMMARY : case UPDATE_NEEDED : { task . set ( taskField , ( ( field . equalsIgnoreCase ( falseText ) == true ) ? Boolean . FALSE : Boolean . TRUE ) ) ; break ; } case CONSTRAINT_TYPE : { task . set ( taskField , ConstraintTypeUtility . getInstance ( m_locale , field ) ) ; break ; } case OBJECTS : case OUTLINE_LEVEL : { task . set ( taskField , Integer . valueOf ( field ) ) ; break ; } case ID : { task . setID ( Integer . valueOf ( field ) ) ; break ; } case UNIQUE_ID : { task . setUniqueID ( Integer . valueOf ( field ) ) ; break ; } case NUMBER1 : case NUMBER2 : case NUMBER3 : case NUMBER4 : case NUMBER5 : { try { task . set ( taskField , m_formats . getDecimalFormat ( ) . parse ( field ) ) ; } catch ( ParseException ex ) { throw new MPXJException ( "Failed to parse number" , ex ) ; } break ; } case PRIORITY : { task . set ( taskField , PriorityUtility . getInstance ( m_locale , field ) ) ; break ; } case TYPE : { boolean fixed = ! field . equalsIgnoreCase ( falseText ) ; task . setType ( fixed ? TaskType . FIXED_DURATION : TaskType . FIXED_UNITS ) ; break ; } default : { task . 
set ( taskField , field ) ; break ; } } } if ( m_projectConfig . getAutoWBS ( ) == true ) { task . generateWBS ( null ) ; } if ( m_projectConfig . getAutoOutlineNumber ( ) == true ) { task . generateOutlineNumber ( null ) ; } if ( m_projectConfig . getAutoOutlineLevel ( ) == true ) { task . setOutlineLevel ( Integer . valueOf ( 1 ) ) ; } if ( m_projectConfig . getAutoTaskUniqueID ( ) == true ) { task . setUniqueID ( Integer . valueOf ( m_projectConfig . getNextTaskUniqueID ( ) ) ) ; } if ( task . getID ( ) == null || m_projectConfig . getAutoTaskID ( ) == true ) { task . setID ( Integer . valueOf ( m_projectConfig . getNextTaskID ( ) ) ) ; } // Handle malformed MPX files - ensure we have a unique ID if ( task . getUniqueID ( ) == null ) { task . setUniqueID ( task . getID ( ) ) ; } // Some applications ( I ' m looking at you SureTrak ) don ' t write start and finish // attributes . If you open an MPX file like this in MS Project , it will use // the early start and early finish values ( if present ) to populate // the start and finish attributes . if ( task . getStart ( ) == null && task . getEarlyStart ( ) != null ) { task . setStart ( task . getEarlyStart ( ) ) ; } if ( task . getFinish ( ) == null && task . getEarlyFinish ( ) != null ) { task . setFinish ( task . getEarlyFinish ( ) ) ; }
public class ClassLocator { /** * Possible calls : * < ul > * < li > * adams . core . ClassLocator & lt ; packages & gt ; < br > * Prints all the packages in the current classpath * < / li > * < li > * adams . core . ClassLocator & lt ; classname & gt ; & lt ; packagename ( s ) & gt ; < br > * Prints the classes it found . * < / li > * < / ul > * @ param argsthe commandline arguments */ public static void main ( String [ ] args ) { } }
List < String > list ; List < String > packages ; int i ; StringTokenizer tok ; if ( ( args . length == 1 ) && ( args [ 0 ] . equals ( "packages" ) ) ) { list = getSingleton ( ) . findPackages ( ) ; for ( i = 0 ; i < list . size ( ) ; i ++ ) System . out . println ( list . get ( i ) ) ; } else if ( args . length == 2 ) { // packages packages = new ArrayList < > ( ) ; tok = new StringTokenizer ( args [ 1 ] , "," ) ; while ( tok . hasMoreTokens ( ) ) packages . add ( tok . nextToken ( ) ) ; // search list = getSingleton ( ) . findNames ( args [ 0 ] , packages . toArray ( new String [ packages . size ( ) ] ) ) ; // print result , if any System . out . println ( "Searching for '" + args [ 0 ] + "' in '" + args [ 1 ] + "':\n" + " " + list . size ( ) + " found." ) ; for ( i = 0 ; i < list . size ( ) ; i ++ ) System . out . println ( " " + ( i + 1 ) + ". " + list . get ( i ) ) ; } else { System . out . println ( "\nUsage:" ) ; System . out . println ( ClassLocator . class . getName ( ) + " packages" ) ; System . out . println ( "\tlists all packages in the classpath" ) ; System . out . println ( ClassLocator . class . getName ( ) + " <classname> <packagename(s)>" ) ; System . out . println ( "\tlists classes derived from/implementing 'classname' that" ) ; System . out . println ( "\tcan be found in 'packagename(s)' (comma-separated list)" ) ; System . out . println ( ) ; System . exit ( 1 ) ; }
public class Computer { /** * Returns projects that are tied on this node . */ public List < AbstractProject > getTiedJobs ( ) { } }
Node node = getNode ( ) ; return ( node != null ) ? node . getSelfLabel ( ) . getTiedJobs ( ) : Collections . EMPTY_LIST ;
public class NotifierUtils { /** * Get file associated with the given URI */ public static File uriToFile ( URI u ) throws IOException { } }
if ( ! u . getScheme ( ) . equals ( NNStorage . LOCAL_URI_SCHEME ) ) { throw new IOException ( "URI does not represent a file" ) ; } return new File ( u . getPath ( ) ) ;
public class SearchExpressionFacade { /** * Resolves a list of { @ link UIComponent } clientIds and / or passtrough expressions for the given expression or expressions . * @ param context The { @ link FacesContext } . * @ param source The source component . E . g . a button . * @ param expressions The search expressions . * @ return A { @ link List } with resolved clientIds and / or passtrough expression ( like PFS , widgetVar ) . */ public static String resolveClientIds ( FacesContext context , UIComponent source , String expressions ) { } }
return resolveClientIds ( context , source , expressions , SearchExpressionHint . NONE ) ;
public class UsageMeteringMessage { /** * Escapes value for a CSV line and appends it to the ' target ' . */ private void escapeValueTo ( final String field , final StringBuilder target ) { } }
if ( field == null ) { target . append ( DETAILS_QUOTE_CHAR ) ; target . append ( DETAILS_QUOTE_CHAR ) ; return ; } target . append ( DETAILS_QUOTE_CHAR ) ; if ( field . indexOf ( DETAILS_ESCAPE_CHAR ) == - 1 && field . indexOf ( DETAILS_QUOTE_CHAR ) == - 1 ) { target . append ( field ) ; } else { final int len = field . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { final char charAt = field . charAt ( i ) ; if ( charAt == DETAILS_ESCAPE_CHAR || charAt == DETAILS_QUOTE_CHAR ) { target . append ( DETAILS_ESCAPE_CHAR ) ; } target . append ( charAt ) ; } } target . append ( DETAILS_QUOTE_CHAR ) ;
public class StringUtilities { /** * Converts a string so that it can be used in a regular expression . * @ param input The string to be converted . * @ return An escaped string that can be used in a regular expression . */ public static String convertToRegexString ( final String input ) { } }
return input . replaceAll ( "\\\\" , "\\\\" ) . replaceAll ( "\\*" , "\\*" ) . replaceAll ( "\\+" , "\\+" ) . replaceAll ( "\\]" , "\\]" ) . replaceAll ( "\\[" , "\\[" ) . replaceAll ( "\\(" , "\\(" ) . replaceAll ( "\\)" , "\\)" ) . replaceAll ( "\\?" , "\\?" ) . replaceAll ( "\\$" , "\\$" ) . replaceAll ( "\\|" , "\\|" ) . replaceAll ( "\\^" , "\\^" ) . replaceAll ( "\\." , "\\." ) ;
public class ProxyServlet { /** * Destroy this Servlet and any active applications . * This is only called when all users are done using this Servlet . */ public void destroy ( ) { } }
super . destroy ( ) ; if ( m_servletTask != null ) m_servletTask . free ( ) ; m_servletTask = null ; ServletTask . destroyServlet ( ) ;
public class ModelsImpl { /** * Get one entity role for a given entity . * @ param appId The application ID . * @ param versionId The version ID . * @ param entityId entity ID . * @ param roleId entity role ID . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < EntityRole > getCustomEntityRoleAsync ( UUID appId , String versionId , UUID entityId , UUID roleId , final ServiceCallback < EntityRole > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getCustomEntityRoleWithServiceResponseAsync ( appId , versionId , entityId , roleId ) , serviceCallback ) ;
public class TransactionableResourceManager { /** * Registers an object to be shared within the XidContext * @ param key the key of the shared object * @ param value the shared object */ public void putSharedObject ( String key , Object value ) { } }
TransactionContext ctx = contexts . get ( ) ; if ( ctx == null ) { throw new IllegalStateException ( "There is no active transaction context" ) ; } XidContext xidCtx = ctx . getXidContext ( ) ; if ( xidCtx == null ) { throw new IllegalStateException ( "There is no active xid context" ) ; } xidCtx . putSharedObject ( key , value ) ;
public class FastAdapterDiffUtil { /** * convenient function for { @ link # set ( FastItemAdapter , List , DiffCallback , boolean ) } * @ return the adapter to allow chaining */ public static < A extends ModelAdapter < Model , Item > , Model , Item extends IItem > A set ( final A adapter , final List < Item > items , final DiffCallback < Item > callback ) { } }
return set ( adapter , items , callback , true ) ;
public class DashletImpl { /** * Determines the dashlet type from the given type string . * @ param typeString string to parse ( from JSON ) * @ return the dashlet type * @ throws UnknownDashletTypeException if no such type was found */ private Type findType ( String typeString ) throws UnknownDashletTypeException { } }
if ( "viewReport" . equalsIgnoreCase ( typeString ) ) { return Type . VIEW ; } else if ( "textContent" . equalsIgnoreCase ( typeString ) ) { return Type . TEXT ; } throw new UnknownDashletTypeException ( typeString ) ;
public class br_remotelicense { /** * < pre > * Use this operation to configure Remote license server on Repeater Instances . * < / pre > */ public static br_remotelicense configureremotelicense ( nitro_service client , br_remotelicense resource ) throws Exception { } }
return ( ( br_remotelicense [ ] ) resource . perform_operation ( client , "configureremotelicense" ) ) [ 0 ] ;
public class JobTracker { /** * Test Methods */ synchronized Set < ReasonForBlackListing > getReasonForBlackList ( String host ) { } }
FaultInfo fi = faultyTrackers . getFaultInfo ( host , false ) ; if ( fi == null ) { return new HashSet < ReasonForBlackListing > ( ) ; } return fi . getReasonforblacklisting ( ) ;
public class ViewSet { /** * Creates a system context view , where the scope of the view is the specified software system . * @ param softwareSystem the SoftwareSystem object representing the scope of the view * @ param key the key for the view ( must be unique ) * @ param description a description of the view * @ return a SystemContextView object * @ throws IllegalArgumentException if the software system is null or the key is not unique */ public SystemContextView createSystemContextView ( SoftwareSystem softwareSystem , String key , String description ) { } }
assertThatTheSoftwareSystemIsNotNull ( softwareSystem ) ; assertThatTheViewKeyIsSpecifiedAndUnique ( key ) ; SystemContextView view = new SystemContextView ( softwareSystem , key , description ) ; view . setViewSet ( this ) ; systemContextViews . add ( view ) ; return view ;
public class Pipes { /** * Register a Queue , and getValue back a listening LazyFutureStream that runs on a single thread * ( not the calling thread ) * < pre > * { @ code * Pipes . register ( " test " , QueueFactories . * < String > boundedNonBlockingQueue ( 100) * . build ( ) ) ; * LazyFutureStream < String > stream = PipesToLazyStreams . cpuBoundStream ( " test " ) ; * stream . filter ( it - > it ! = null ) . peek ( System . out : : println ) . run ( ) ; * } < / pre > * @ param key : Adapter identifier * @ param adapter */ public void register ( final K key , final Adapter < V > adapter ) { } }
registered . put ( key , adapter ) ;
public class AssociateProductWithPortfolioRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AssociateProductWithPortfolioRequest associateProductWithPortfolioRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( associateProductWithPortfolioRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( associateProductWithPortfolioRequest . getAcceptLanguage ( ) , ACCEPTLANGUAGE_BINDING ) ; protocolMarshaller . marshall ( associateProductWithPortfolioRequest . getProductId ( ) , PRODUCTID_BINDING ) ; protocolMarshaller . marshall ( associateProductWithPortfolioRequest . getPortfolioId ( ) , PORTFOLIOID_BINDING ) ; protocolMarshaller . marshall ( associateProductWithPortfolioRequest . getSourcePortfolioId ( ) , SOURCEPORTFOLIOID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class RegistriesInner { /** * Updates the policies for the specified container registry . * @ param resourceGroupName The name of the resource group to which the container registry belongs . * @ param registryName The name of the container registry . * @ param registryPoliciesUpdateParameters The parameters for updating policies of a container registry . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < RegistryPoliciesInner > updatePoliciesAsync ( String resourceGroupName , String registryName , RegistryPoliciesInner registryPoliciesUpdateParameters , final ServiceCallback < RegistryPoliciesInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( updatePoliciesWithServiceResponseAsync ( resourceGroupName , registryName , registryPoliciesUpdateParameters ) , serviceCallback ) ;
public class MetricsClient { /** * Creates or updates a logs - based metric . * < p > Sample code : * < pre > < code > * try ( MetricsClient metricsClient = MetricsClient . create ( ) ) { * MetricName metricName = ProjectMetricName . of ( " [ PROJECT ] " , " [ METRIC ] " ) ; * LogMetric metric = LogMetric . newBuilder ( ) . build ( ) ; * LogMetric response = metricsClient . updateLogMetric ( metricName . toString ( ) , metric ) ; * < / code > < / pre > * @ param metricName The resource name of the metric to update : * < p > " projects / [ PROJECT _ ID ] / metrics / [ METRIC _ ID ] " * < p > The updated metric must be provided in the request and it ' s ` name ` field must be the * same as ` [ METRIC _ ID ] ` If the metric does not exist in ` [ PROJECT _ ID ] ` , then a new metric is * created . * @ param metric The updated metric . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final LogMetric updateLogMetric ( String metricName , LogMetric metric ) { } }
UpdateLogMetricRequest request = UpdateLogMetricRequest . newBuilder ( ) . setMetricName ( metricName ) . setMetric ( metric ) . build ( ) ; return updateLogMetric ( request ) ;
public class ListenerList { /** * Adds a listener to the listener list in the supplied map . If no list exists , one will be * created and mapped to the supplied key . */ public static < L , K > void addListener ( Map < K , ListenerList < L > > map , K key , L listener ) { } }
ListenerList < L > list = map . get ( key ) ; if ( list == null ) { map . put ( key , list = new ListenerList < L > ( ) ) ; } list . add ( listener ) ;
public class Alias { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPMessageHandlerControllable # isForeign ( ) */ public boolean isForeign ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "isForeign" ) ; boolean isForeign = aliasDest . isForeign ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "isForeign" , new Boolean ( isForeign ) ) ; return isForeign ;
public class CmsVfsDriver { /** * Removes a resource physically in the database . < p > * @ param dbc the current database context * @ param currentProject the current project * @ param resource the folder to remove * @ throws CmsDataAccessException if something goes wrong */ protected void internalRemoveFolder ( CmsDbContext dbc , CmsProject currentProject , CmsResource resource ) throws CmsDataAccessException { } }
// NOTE(review): deletes the folder's STRUCTURE record first, then its RESOURCES record, on the
// same connection. The mid-method closeAll passes a null connection deliberately so that only the
// first statement is closed and the connection remains open for the second delete; the finally
// block then closes both the remaining statement and the connection. Any SQLException is wrapped
// in a CmsDbSqlException carrying the failing query text.
PreparedStatement stmt = null ; Connection conn = null ; try { conn = m_sqlManager . getConnection ( dbc ) ; // delete the structure record stmt = m_sqlManager . getPreparedStatement ( conn , currentProject , "C_STRUCTURE_DELETE_BY_STRUCTUREID" ) ; stmt . setString ( 1 , resource . getStructureId ( ) . toString ( ) ) ; stmt . executeUpdate ( ) ; m_sqlManager . closeAll ( dbc , null , stmt , null ) ; // delete the resource record stmt = m_sqlManager . getPreparedStatement ( conn , currentProject , "C_RESOURCES_DELETE_BY_RESOURCEID" ) ; stmt . setString ( 1 , resource . getResourceId ( ) . toString ( ) ) ; stmt . executeUpdate ( ) ; } catch ( SQLException e ) { throw new CmsDbSqlException ( Messages . get ( ) . container ( Messages . ERR_GENERIC_SQL_1 , CmsDbSqlException . getErrorQuery ( stmt ) ) , e ) ; } finally { m_sqlManager . closeAll ( dbc , conn , stmt , null ) ; }
public class PrefixMappedItemCache { /** * Checks if the { @ link CacheValue } has expired . * @ param value the value to check . * @ return true if the value has expired , false otherwise . */ private < V > boolean isExpired ( CacheValue < V > value ) { } }
long diff = ticker . read ( ) - value . getCreationTime ( ) ; return diff > maxEntryAgeNanos ;
public class TypeHelper { /** * Unwinds parametrized type into plain list that contains all parameters for the given type including nested parameterized types , * for example calling the method for the following type * < code > * GType < GType < GDoubleType < GType < GDoubleType < Parent , Parent > > , Parent > > > * < / code > * will return list of 8 elements : * < code > * [ GType , GType , GDoubleType , GType , GDoubleType , Parent , Parent , Parent ] * < / code > * if the given type is not parametrized then returns list with one element which is given type passed into method . * @ param type the parameterized type * @ return list of { @ link Type } */ @ ParametersAreNonnullByDefault public static List < Type > flattenTypeVariables ( Type type ) { } }
Validate . notNull ( type , "type cannot be null" ) ; List < Type > types = new ArrayList < Type > ( ) ; TreeTraverser < Type > typeTraverser = new TreeTraverser < Type > ( ) { @ Override public Iterable < Type > children ( Type root ) { if ( root instanceof ParameterizedType ) { ParameterizedType pType = ( ParameterizedType ) root ; return Arrays . asList ( pType . getActualTypeArguments ( ) ) ; } else if ( root instanceof TypeVariable ) { TypeVariable pType = ( TypeVariable ) root ; return Arrays . asList ( pType . getBounds ( ) ) ; } return Collections . emptyList ( ) ; } } ; for ( Type t : typeTraverser . breadthFirstTraversal ( type ) ) { types . add ( t ) ; } return types ;
public class Query { /** * Returns a new { @ link GqlQuery } builder . * < p > Example of creating and running a typed GQL query . * < pre > { @ code * String kind = " my _ kind " ; * String gqlQuery = " select * from " + kind ; * Query < Entity > query = Query . newGqlQueryBuilder ( Query . ResultType . ENTITY , gqlQuery ) . build ( ) ; * QueryResults < Entity > results = datastore . run ( query ) ; * / / Use results * } < / pre > * @ see < a href = " https : / / cloud . google . com / datastore / docs / apis / gql / gql _ reference " > GQL Reference < / a > */ public static < V > GqlQuery . Builder < V > newGqlQueryBuilder ( ResultType < V > resultType , String gql ) { } }
return new GqlQuery . Builder < > ( resultType , gql ) ;
public class BELScriptParser { /** * BELScript . g : 88:1 : unset : ' UNSET ' ( OBJECT _ IDENT | IDENT _ LIST ) ; */ public final BELScriptParser . unset_return unset ( ) throws RecognitionException { } }
// NOTE(review): ANTLR-generated parser rule for BELScript.g "unset" — matches the literal
// 'UNSET' followed by either an OBJECT_IDENT or an IDENT_LIST token and builds the AST for it.
// Generated code: token numbers, FOLLOW sets, and statement order must match the grammar
// output exactly; regenerate from the grammar instead of hand-editing.
BELScriptParser . unset_return retval = new BELScriptParser . unset_return ( ) ; retval . start = input . LT ( 1 ) ; Object root_0 = null ; Token string_literal31 = null ; Token set32 = null ; Object string_literal31_tree = null ; Object set32_tree = null ; paraphrases . push ( "in unset." ) ; try { // BELScript . g : 91:5 : ( ' UNSET ' ( OBJECT _ IDENT | IDENT _ LIST ) ) // BELScript . g : 92:5 : ' UNSET ' ( OBJECT _ IDENT | IDENT _ LIST ) { root_0 = ( Object ) adaptor . nil ( ) ; string_literal31 = ( Token ) match ( input , 26 , FOLLOW_26_in_unset381 ) ; string_literal31_tree = ( Object ) adaptor . create ( string_literal31 ) ; adaptor . addChild ( root_0 , string_literal31_tree ) ; set32 = ( Token ) input . LT ( 1 ) ; if ( input . LA ( 1 ) == OBJECT_IDENT || input . LA ( 1 ) == IDENT_LIST ) { input . consume ( ) ; adaptor . addChild ( root_0 , ( Object ) adaptor . create ( set32 ) ) ; state . errorRecovery = false ; } else { MismatchedSetException mse = new MismatchedSetException ( null , input ) ; throw mse ; } } retval . stop = input . LT ( - 1 ) ; retval . tree = ( Object ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; paraphrases . pop ( ) ; } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( Object ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { } return retval ;
public class JSONObject { /** * Internal method for doing a simple indention write . * @ param writer The writer to use while writing the JSON text . * @ param indentDepth How deep to indent the text . * @ throws IOException Trhown if an error occurs on write . */ private void writeIndention ( Writer writer , int indentDepth ) throws IOException { } }
if ( logger . isLoggable ( Level . FINER ) ) logger . entering ( className , "writeIndention(Writer, int)" ) ; try { for ( int i = 0 ; i < indentDepth ; i ++ ) { writer . write ( indent ) ; } } catch ( Exception ex ) { IOException iox = new IOException ( "Error occurred on serialization of JSON text." ) ; iox . initCause ( ex ) ; throw iox ; } if ( logger . isLoggable ( Level . FINER ) ) logger . exiting ( className , "writeIndention(Writer, int)" ) ;
public class MurmurHash3 { /** * Hashes a byte - array fragment using MurmurHash3. * @ param array a byte array . * @ param offset the first valid byte in { @ code array } . * @ param length the number of valid elements in { @ code array } . * @ param seed a seed . * @ return a 64 - bit MurmurHash3 hash for the specified fragment . */ public final static long hash ( final byte [ ] array , final int offset , final int length , final long seed ) { } }
// NOTE(review): processes the input in 16-byte (two 64-bit word) blocks, then handles the
// 0-15 byte tail via the switch below. The switch cases deliberately FALL THROUGH from
// (length & 15) down to case 1 so each remaining byte is folded in — do not add breaks.
// Constants, multiplier updates, and rotation amounts (23/41) must match the reference
// MurmurHash3 implementation exactly; any reordering changes the hash values. fmix() and
// getblock() are sibling helpers defined elsewhere in this class.
long h1 = 0x9368e53c2f6af274L ^ seed ; long h2 = 0x586dcd208f7cd3fdL ^ seed ; long c1 = 0x87c37b91114253d5L ; long c2 = 0x4cf5ad432745937fL ; long k1 = 0 ; long k2 = 0 ; for ( int i = 0 ; i < length / 16 ; i ++ ) { k1 = getblock ( array , offset + i * 2 * 8 ) ; k2 = getblock ( array , offset + ( i * 2 + 1 ) * 8 ) ; k1 *= c1 ; k1 = ( k1 << 23 ) | ( k1 >>> 64 - 23 ) ; k1 *= c2 ; h1 ^= k1 ; h1 += h2 ; h2 = ( h2 << 41 ) | ( h2 >>> 64 - 41 ) ; k2 *= c2 ; k2 = ( k2 << 23 ) | ( k2 >>> 64 - 23 ) ; k2 *= c1 ; h2 ^= k2 ; h2 += h1 ; h1 = h1 * 3 + 0x52dce729 ; h2 = h2 * 3 + 0x38495ab5 ; c1 = c1 * 5 + 0x7b7d159c ; c2 = c2 * 5 + 0x6bce6396 ; } k1 = 0 ; k2 = 0 ; final int tail = offset + ( ( length >>> 4 ) << 4 ) ; switch ( length & 15 ) { case 15 : k2 ^= ( long ) array [ tail + 14 ] << 48 ; case 14 : k2 ^= ( long ) array [ tail + 13 ] << 40 ; case 13 : k2 ^= ( long ) array [ tail + 12 ] << 32 ; case 12 : k2 ^= ( long ) array [ tail + 11 ] << 24 ; case 11 : k2 ^= ( long ) array [ tail + 10 ] << 16 ; case 10 : k2 ^= ( long ) array [ tail + 9 ] << 8 ; case 9 : k2 ^= ( long ) array [ tail + 8 ] << 0 ; case 8 : k1 ^= ( long ) array [ tail + 7 ] << 56 ; case 7 : k1 ^= ( long ) array [ tail + 6 ] << 48 ; case 6 : k1 ^= ( long ) array [ tail + 5 ] << 40 ; case 5 : k1 ^= ( long ) array [ tail + 4 ] << 32 ; case 4 : k1 ^= ( long ) array [ tail + 3 ] << 24 ; case 3 : k1 ^= ( long ) array [ tail + 2 ] << 16 ; case 2 : k1 ^= ( long ) array [ tail + 1 ] << 8 ; case 1 : k1 ^= ( long ) array [ tail + 0 ] << 0 ; k1 *= c1 ; k1 = ( k1 << 23 ) | ( k1 >>> 64 - 23 ) ; k1 *= c2 ; h1 ^= k1 ; h1 += h2 ; h2 = ( h2 << 41 ) | ( h2 >>> 64 - 41 ) ; k2 *= c2 ; k2 = ( k2 << 23 ) | ( k2 >>> 64 - 23 ) ; k2 *= c1 ; h2 ^= k2 ; h2 += h1 ; h1 = h1 * 3 + 0x52dce729 ; h2 = h2 * 3 + 0x38495ab5 ; c1 = c1 * 5 + 0x7b7d159c ; c2 = c2 * 5 + 0x6bce6396 ; } h2 ^= length ; h1 += h2 ; h2 += h1 ; h1 = fmix ( h1 ) ; h2 = fmix ( h2 ) ; h1 += h2 ; return h1 ;
public class InstanceGroupClient { /** * Retrieves the list of instance groups and sorts them by zone . * < p > Sample code : * < pre > < code > * try ( InstanceGroupClient instanceGroupClient = InstanceGroupClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( InstanceGroupsScopedList element : instanceGroupClient . aggregatedListInstanceGroups ( project . toString ( ) ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final AggregatedListInstanceGroupsPagedResponse aggregatedListInstanceGroups ( String project ) { } }
AggregatedListInstanceGroupsHttpRequest request = AggregatedListInstanceGroupsHttpRequest . newBuilder ( ) . setProject ( project ) . build ( ) ; return aggregatedListInstanceGroups ( request ) ;
public class CreateDirectConnectGatewayAssociationProposalRequest { /** * The Amazon VPC prefixes to advertise to the Direct Connect gateway . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAddAllowedPrefixesToDirectConnectGateway ( java . util . Collection ) } or * { @ link # withAddAllowedPrefixesToDirectConnectGateway ( java . util . Collection ) } if you want to override the existing * values . * @ param addAllowedPrefixesToDirectConnectGateway * The Amazon VPC prefixes to advertise to the Direct Connect gateway . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateDirectConnectGatewayAssociationProposalRequest withAddAllowedPrefixesToDirectConnectGateway ( RouteFilterPrefix ... addAllowedPrefixesToDirectConnectGateway ) { } }
if ( this . addAllowedPrefixesToDirectConnectGateway == null ) { setAddAllowedPrefixesToDirectConnectGateway ( new com . amazonaws . internal . SdkInternalList < RouteFilterPrefix > ( addAllowedPrefixesToDirectConnectGateway . length ) ) ; } for ( RouteFilterPrefix ele : addAllowedPrefixesToDirectConnectGateway ) { this . addAllowedPrefixesToDirectConnectGateway . add ( ele ) ; } return this ;
public class NumberUtil { /** * 生成不重复随机数 根据给定的最小数字和最大数字 , 以及随机数的个数 , 产生指定的不重复的数组 * @ param begin 最小数字 ( 包含该数 ) * @ param end 最大数字 ( 不包含该数 ) * @ param size 指定产生随机数的个数 * @ return 随机int数组 */ public static Integer [ ] generateBySet ( int begin , int end , int size ) { } }
if ( begin > end ) { int temp = begin ; begin = end ; end = temp ; } // 加入逻辑判断 , 确保begin < end并且size不能大于该表示范围 if ( ( end - begin ) < size ) { throw new UtilException ( "Size is larger than range between begin and end!" ) ; } Random ran = new Random ( ) ; Set < Integer > set = new HashSet < Integer > ( ) ; while ( set . size ( ) < size ) { set . add ( begin + ran . nextInt ( end - begin ) ) ; } Integer [ ] ranArr = set . toArray ( new Integer [ size ] ) ; return ranArr ;
public class InternalSARLParser { /** * InternalSARL . g : 10103:1 : ruleXConstructorCall returns [ EObject current = null ] : ( this _ XbaseConstructorCall _ 0 = ruleXbaseConstructorCall ( ( ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) ) ( ( lv _ members _ 3_0 = ruleMember ) ) * otherlv _ 4 = ' } ' ) ? ) ; */ public final EObject ruleXConstructorCall ( ) throws RecognitionException { } }
// NOTE(review): Xtext/ANTLR-generated rule: parses an XbaseConstructorCall optionally followed
// by an anonymous-class body '{' Member* '}'. DFA 259 predicts whether the optional body is
// present; loop258 accumulates members. Generated code — token numbers, FOLLOW sets, and the
// backtracking guards must stay exactly as emitted; regenerate from InternalSARL.g rather than
// hand-editing.
EObject current = null ; Token otherlv_2 = null ; Token otherlv_4 = null ; EObject this_XbaseConstructorCall_0 = null ; EObject lv_members_3_0 = null ; enterRule ( ) ; try { // InternalSARL . g : 10109:2 : ( ( this _ XbaseConstructorCall _ 0 = ruleXbaseConstructorCall ( ( ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) ) ( ( lv _ members _ 3_0 = ruleMember ) ) * otherlv _ 4 = ' } ' ) ? ) ) // InternalSARL . g : 10110:2 : ( this _ XbaseConstructorCall _ 0 = ruleXbaseConstructorCall ( ( ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) ) ( ( lv _ members _ 3_0 = ruleMember ) ) * otherlv _ 4 = ' } ' ) ? ) { // InternalSARL . g : 10110:2 : ( this _ XbaseConstructorCall _ 0 = ruleXbaseConstructorCall ( ( ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) ) ( ( lv _ members _ 3_0 = ruleMember ) ) * otherlv _ 4 = ' } ' ) ? ) // InternalSARL . g : 10111:3 : this _ XbaseConstructorCall _ 0 = ruleXbaseConstructorCall ( ( ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) ) ( ( lv _ members _ 3_0 = ruleMember ) ) * otherlv _ 4 = ' } ' ) ? { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXConstructorCallAccess ( ) . getXbaseConstructorCallParserRuleCall_0 ( ) ) ; } pushFollow ( FOLLOW_10 ) ; this_XbaseConstructorCall_0 = ruleXbaseConstructorCall ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = this_XbaseConstructorCall_0 ; afterParserOrEnumRuleCall ( ) ; } // InternalSARL . g : 10119:3 : ( ( ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) ) ( ( lv _ members _ 3_0 = ruleMember ) ) * otherlv _ 4 = ' } ' ) ? int alt259 = 2 ; alt259 = dfa259 . predict ( input ) ; switch ( alt259 ) { case 1 : // InternalSARL . g : 10120:4 : ( ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) ) ( ( lv _ members _ 3_0 = ruleMember ) ) * otherlv _ 4 = ' } ' { // InternalSARL . g : 10120:4 : ( ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) ) // InternalSARL . 
g : 10121:5 : ( ( ( ) ' { ' ) ) = > ( ( ) otherlv _ 2 = ' { ' ) { // InternalSARL . g : 10127:5 : ( ( ) otherlv _ 2 = ' { ' ) // InternalSARL . g : 10128:6 : ( ) otherlv _ 2 = ' { ' { // InternalSARL . g : 10128:6 : ( ) // InternalSARL . g : 10129:7: { if ( state . backtracking == 0 ) { current = forceCreateModelElementAndSet ( grammarAccess . getXConstructorCallAccess ( ) . getAnonymousClassConstructorCallAction_1_0_0_0 ( ) , current ) ; } } otherlv_2 = ( Token ) match ( input , 29 , FOLLOW_29 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getXConstructorCallAccess ( ) . getLeftCurlyBracketKeyword_1_0_0_1 ( ) ) ; } } } // InternalSARL . g : 10141:4 : ( ( lv _ members _ 3_0 = ruleMember ) ) * loop258 : do { int alt258 = 2 ; int LA258_0 = input . LA ( 1 ) ; if ( ( LA258_0 == 25 || LA258_0 == 39 || ( LA258_0 >= 42 && LA258_0 <= 45 ) || LA258_0 == 48 || ( LA258_0 >= 65 && LA258_0 <= 66 ) || ( LA258_0 >= 78 && LA258_0 <= 91 ) || LA258_0 == 105 ) ) { alt258 = 1 ; } switch ( alt258 ) { case 1 : // InternalSARL . g : 10142:5 : ( lv _ members _ 3_0 = ruleMember ) { // InternalSARL . g : 10142:5 : ( lv _ members _ 3_0 = ruleMember ) // InternalSARL . g : 10143:6 : lv _ members _ 3_0 = ruleMember { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXConstructorCallAccess ( ) . getMembersMemberParserRuleCall_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_29 ) ; lv_members_3_0 = ruleMember ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXConstructorCallRule ( ) ) ; } add ( current , "members" , lv_members_3_0 , "io.sarl.lang.SARL.Member" ) ; afterParserOrEnumRuleCall ( ) ; } } } break ; default : break loop258 ; } } while ( true ) ; otherlv_4 = ( Token ) match ( input , 30 , FOLLOW_2 ) ; if ( state . failed ) return current ; if ( state . 
backtracking == 0 ) { newLeafNode ( otherlv_4 , grammarAccess . getXConstructorCallAccess ( ) . getRightCurlyBracketKeyword_1_2 ( ) ) ; } } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class AkkaInvocationHandler { /** * Sends the message to the RPC endpoint and returns a future containing * its response . * @ param message to send to the RPC endpoint * @ param timeout time to wait until the response future is failed with a { @ link TimeoutException } * @ return Response future */ protected CompletableFuture < ? > ask ( Object message , Time timeout ) { } }
return FutureUtils . toJava ( Patterns . ask ( rpcEndpoint , message , timeout . toMilliseconds ( ) ) ) ;
public class AbstractCassandraStorage { /** * get pig type for the cassandra data type */ protected byte getPigType ( AbstractType type ) { } }
// NOTE(review): maps a Cassandra AbstractType to the corresponding Pig DataType constant,
// falling back to BYTEARRAY for anything unmatched. The order of the instanceof checks is
// presumably significant if any of these types are related by inheritance — confirm before
// reordering. DecimalType and InetAddressType deliberately map to CHARARRAY (string form).
if ( type instanceof LongType || type instanceof DateType || type instanceof TimestampType ) // DateType is bad and it should feel bad return DataType . LONG ; else if ( type instanceof IntegerType || type instanceof Int32Type ) // IntegerType will overflow at 2 * * 31 , but is kept for compatibility until pig has a BigInteger return DataType . INTEGER ; else if ( type instanceof AsciiType || type instanceof UTF8Type || type instanceof DecimalType || type instanceof InetAddressType ) return DataType . CHARARRAY ; else if ( type instanceof FloatType ) return DataType . FLOAT ; else if ( type instanceof DoubleType ) return DataType . DOUBLE ; else if ( type instanceof AbstractCompositeType || type instanceof CollectionType ) return DataType . TUPLE ; return DataType . BYTEARRAY ;
public class XmlSlurper { /** * / * ( non - Javadoc ) * @ see org . xml . sax . helpers . DefaultHandler # startPrefixMapping ( java . lang . String , java . lang . String ) */ public void startPrefixMapping ( final String tag , final String uri ) throws SAXException { } }
if ( namespaceAware ) namespaceTagHints . put ( tag , uri ) ;
public class TypeSignature { /** * Creates a new type signature for the map with the specified key and value type signatures . * This method is a shortcut of : * < pre > { @ code * ofMap ( " map " , keyTypeSignature , valueTypeSignature ) ; * } < / pre > */ public static TypeSignature ofMap ( TypeSignature keyTypeSignature , TypeSignature valueTypeSignature ) { } }
requireNonNull ( keyTypeSignature , "keyTypeSignature" ) ; requireNonNull ( valueTypeSignature , "valueTypeSignature" ) ; return ofContainer ( "map" , keyTypeSignature , valueTypeSignature ) ;
public class CommerceAccountPersistenceImpl { /** * Returns the first commerce account in the ordered set where userId = & # 63 ; and type = & # 63 ; . * @ param userId the user ID * @ param type the type * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce account , or < code > null < / code > if a matching commerce account could not be found */ @ Override public CommerceAccount fetchByU_T_First ( long userId , int type , OrderByComparator < CommerceAccount > orderByComparator ) { } }
List < CommerceAccount > list = findByU_T ( userId , type , 0 , 1 , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class CmsEncoder { /** * Checks if a given encoding name is actually supported , and if so * resolves it to it ' s canonical name , if not it returns the given fallback * value . < p > * Charsets have a set of aliases . For example , valid aliases for " UTF - 8" * are " UTF8 " , " utf - 8 " or " utf8 " . This method resolves any given valid charset name * to it ' s " canonical " form , so that simple String comparison can be used * when checking charset names internally later . < p > * Please see < a href = " http : / / www . iana . org / assignments / character - sets " > http : / / www . iana . org / assignments / character - sets < / a > * for a list of valid charset alias names . < p > * @ param encoding the encoding to check and resolve * @ param fallback the fallback encoding scheme * @ return the resolved encoding name , or the fallback value */ public static String lookupEncoding ( String encoding , String fallback ) { } }
String result = m_encodingCache . get ( encoding ) ; if ( result != null ) { return result ; } try { result = Charset . forName ( encoding ) . name ( ) ; m_encodingCache . put ( encoding , result ) ; return result ; } catch ( Throwable t ) { // we will use the default value as fallback } return fallback ;
public class HubertKappaAgreement { /** * Calculates the expected inter - rater agreement that assumes the same * distribution for all raters and annotations . * @ throws NullPointerException if the annotation study is null . * @ throws ArithmeticException if there are no items in the * annotation study . */ @ Override public double calculateExpectedAgreement ( ) { } }
// NOTE(review): sums, per category, the pairwise products of annotation counts over all
// rater pairs (m < n), doubles the sum, then normalizes by raterCount * (raterCount - 1)
// * itemCount^2 using DECIMAL128 precision. BigDecimal is used to avoid overflow/rounding
// in the intermediate products; a study with zero items makes the divisor zero, hence the
// documented ArithmeticException.
Map < Object , int [ ] > annotationsPerCategory = CodingAnnotationStudy . countAnnotationsPerCategory ( study ) ; BigDecimal result = BigDecimal . ZERO ; for ( Object category : study . getCategories ( ) ) { int [ ] annotationCounts = annotationsPerCategory . get ( category ) ; for ( int m = 0 ; m < study . getRaterCount ( ) ; m ++ ) for ( int n = m + 1 ; n < study . getRaterCount ( ) ; n ++ ) result = result . add ( new BigDecimal ( annotationCounts [ m ] ) . multiply ( new BigDecimal ( annotationCounts [ n ] ) ) ) ; } result = result . multiply ( new BigDecimal ( 2 ) ) ; result = result . divide ( new BigDecimal ( study . getRaterCount ( ) ) . multiply ( new BigDecimal ( study . getRaterCount ( ) - 1 ) ) . multiply ( new BigDecimal ( study . getItemCount ( ) ) . pow ( 2 ) ) , MathContext . DECIMAL128 ) ; return result . doubleValue ( ) ; }
public class ValidatorRESTHandler { /** * Populate JSON object for a top level exception or error . * @ param errorInfo additional information to append to exceptions and causes * @ param error the top level exception or error . * @ return JSON object representing the Throwable . */ @ SuppressWarnings ( "unchecked" ) @ Trivial private JSONObject toJSONObjectForThrowable ( Map < String , ? > errorInfo , Throwable error ) { } }
// NOTE(review): walks the cause chain, building one nested JSON object per Throwable with
// class, message, and stack entries. The `causes` HashSet guards against cycles in the cause
// chain (causes.add returns false on a repeat, ending the loop). List-valued entries in
// errorInfo supply per-position extra attributes: index i of each list is attached to the
// i-th throwable in the chain (e.g. sqlState/errorCode per cause). Each nested cause object
// is linked under the "cause" key of its parent.
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "toJSONObjectForThrowable" , errorInfo , error ) ; LinkedHashMap < String , List < ? > > addedInfo = new LinkedHashMap < String , List < ? > > ( ) ; for ( Map . Entry < String , ? > entry : errorInfo . entrySet ( ) ) { Object value = entry . getValue ( ) ; if ( value instanceof List ) addedInfo . put ( entry . getKey ( ) , ( List < ? > ) value ) ; } JSONObject json = new OrderedJSONObject ( ) ; int index = 0 ; JSONObject current = json ; Set < Throwable > causes = new HashSet < Throwable > ( ) ; // avoid cycles in exception chain for ( Throwable cause = error ; cause != null && causes . add ( cause ) ; index ++ ) { // custom attributes for position in exception chain ( for example , sqlState and errorCode for dataSource ) for ( Map . Entry < String , List < ? > > entry : addedInfo . entrySet ( ) ) { List < ? > values = entry . getValue ( ) ; if ( values . size ( ) > index ) { Object value = values . get ( index ) ; if ( value != null ) current . put ( entry . getKey ( ) , value ) ; } } current . put ( "class" , cause . getClass ( ) . getName ( ) ) ; current . put ( "message" , cause . getMessage ( ) ) ; JSONArray stack = new JSONArray ( ) ; for ( StackTraceElement element : cause . getStackTrace ( ) ) stack . add ( element . toString ( ) ) ; current . put ( "stack" , stack ) ; if ( ( cause = cause . getCause ( ) ) != null ) { Map < String , Object > parent = current ; parent . put ( "cause" , current = new OrderedJSONObject ( ) ) ; } } return json ;
public class SemaphoreBulkhead { /** * { @ inheritDoc } */ @ Override public void changeConfig ( final BulkheadConfig newConfig ) { } }
synchronized ( configChangesLock ) { int delta = newConfig . getMaxConcurrentCalls ( ) - config . getMaxConcurrentCalls ( ) ; if ( delta < 0 ) { semaphore . acquireUninterruptibly ( - delta ) ; } else if ( delta > 0 ) { semaphore . release ( delta ) ; } config = newConfig ; }
public class BoxTableExtractor { /** * Adjusts the table column information by analyzing the coordinate information of each text pieces in the table bounday area * @ param cc * the number of the table column * @ param b * the X - axis of the left - end of the current table piece * @ param e * the X - axis of the right end of the current table piece * @ param leftX _ tableColumns * the array to store the left - end X axes for all the table columns * @ param rightX _ tableColumns * the array to store the right - end X axes for all the table columns * @ return * the updated table column information ( int value ) */ public int adjustAColumn ( int cc , float b , float e , float [ ] leftX_tableColumns , float [ ] rightX_tableColumns ) { } }
// NOTE(review): mutates leftX_tableColumns/rightX_tableColumns IN PLACE while scanning the
// existing columns, widening a column's boundaries when the new piece [b, e] overlaps or
// extends it (cases 1-9 below, split by interior / first / last column). If the piece lies
// far enough left of column 0 or right of the last column (gap > 2x average line gap), a new
// column is inserted (shifting existing entries) or appended, incrementing cc. The caller
// must ensure the arrays have spare capacity for that growth — not checked here. Returns the
// possibly-increased column count.
for ( int bb = 0 ; bb < cc ; bb ++ ) { float c = leftX_tableColumns [ bb ] ; float d = rightX_tableColumns [ bb ] ; if ( ( bb > 0 ) && ( bb < ( cc - 1 ) ) ) { float a = rightX_tableColumns [ bb - 1 ] ; float f = leftX_tableColumns [ bb + 1 ] ; if ( ( a < b ) && ( b <= c ) && ( d <= e ) && ( e < f ) ) { // case 1 : both ends of the new cell exceed the boundary of current column , but not overlap with the 2nd column . We should update the boundary of the current column in both ends leftX_tableColumns [ bb ] = b ; rightX_tableColumns [ bb ] = e ; } if ( ( a < b ) && ( b < c ) && ( c < e ) && ( e < d ) ) // case 2 : only the left end of the new cell exceed the boundary of the current column , only need to update the left side leftX_tableColumns [ bb ] = b ; if ( ( c < b ) && ( b < d ) && ( d < e ) && ( e < f ) ) // case 3 : only the right end of the new cell exceed the boundary of the current column , only need to update the right side rightX_tableColumns [ bb ] = e ; } if ( bb == 0 ) { float f = leftX_tableColumns [ bb + 1 ] ; if ( ( b <= c ) && ( d <= e ) && ( e < f ) ) { // case 4 : both ends of the new cell exceed the boundary of first column , but not overlap with the 2nd column . We should update the boundary of the 1st column in both ends leftX_tableColumns [ 0 ] = b ; rightX_tableColumns [ 0 ] = e ; } if ( ( b < c ) && ( c < e ) && ( e < d ) ) // case 5 : only the left end of the new cell exceed the boundary of the 1st column , only need to update the left side leftX_tableColumns [ 0 ] = b ; if ( ( c < b ) && ( b < d ) && ( d < e ) && ( e < f ) ) // case 6 : only the right end of the new cell exceed the boundary of the 1st column , only need to update the right side rightX_tableColumns [ 0 ] = e ; if ( ( c - e ) > m_docInfo . 
getAverageLineGap ( ) * 2.0 ) { // new left - most column cc ++ ; for ( int t = ( cc - 1 ) ; t > 0 ; t -- ) { leftX_tableColumns [ t ] = leftX_tableColumns [ t - 1 ] ; rightX_tableColumns [ t ] = rightX_tableColumns [ t - 1 ] ; } leftX_tableColumns [ 0 ] = b ; rightX_tableColumns [ 0 ] = e ; } } if ( bb == ( cc - 1 ) && ( bb > 0 ) ) { float a = rightX_tableColumns [ bb - 1 ] ; if ( ( a < b ) && ( b <= c ) && ( d <= e ) ) { // case 7 leftX_tableColumns [ bb ] = b ; rightX_tableColumns [ bb ] = e ; } if ( ( a < b ) && ( b < c ) && ( c < e ) && ( e < d ) ) // case 8 leftX_tableColumns [ bb ] = b ; if ( ( c < b ) && ( b < d ) && ( d < e ) ) // case 9 rightX_tableColumns [ bb ] = e ; if ( ( b - d ) > m_docInfo . getAverageLineGap ( ) * 2.0 ) { // new right - most column cc ++ ; leftX_tableColumns [ cc - 1 ] = b ; rightX_tableColumns [ cc - 1 ] = e ; } } } // end ( for bb < cc ) return ( cc ) ;
public class RectifyImageOps {
    /**
     * Adjust the rectification such that only pixels which overlap the original left image can be seen. For use with
     * uncalibrated images with unknown baselines. Image processing is easier since only the "true" image pixels
     * are visible, but information along the image border has been discarded. The rectification matrices are
     * overwritten with adjusted values on output.
     *
     * @param imageWidth Width of left image.
     * @param imageHeight Height of left image.
     * @param rectifyLeft Rectification matrix for left image. Input and Output. Modified.
     * @param rectifyRight Rectification matrix for right image. Input and Output. Modified.
     */
    public static void allInsideLeft(int imageWidth, int imageHeight, FMatrixRMaj rectifyLeft, FMatrixRMaj rectifyRight) {
        // Delegates to the 32-bit float implementation, which mutates both matrices in place.
        ImplRectifyImageOps_F32.allInsideLeft(imageWidth, imageHeight, rectifyLeft, rectifyRight);
    }
}
public class FinalMappings { /** * { @ inheritDoc } */ @ Override synchronized public Iterator < Map < Integer , Integer > > getIterator ( ) { } }
Iterator < Map < Integer , Integer > > iterator = mappings . iterator ( ) ; return iterator ;
public class Migrator {
    /**
     * Loads {@link GitHubPushTrigger.DescriptorImpl} and migrate all values
     * to {@link org.jenkinsci.plugins.github.config.GitHubPluginConfig}
     *
     * @throws IOException if any read-save problems as it critical to work process of this plugin
     */
    public void migrate() throws IOException {
        LOGGER.debug("Check if GitHub Plugin needs config migration");
        GitHubPushTrigger.DescriptorImpl descriptor = GitHubPushTrigger.DescriptorImpl.get();
        descriptor.load();
        // Step 1: move any credentials stored on the legacy descriptor into the plugin-level config.
        if (isNotEmpty(descriptor.getCredentials())) {
            LOGGER.warn("Migration for old GitHub Plugin credentials started");
            GitHubPlugin.configuration().getConfigs().addAll(
                    from(descriptor.getCredentials()).transform(toGHServerConfig()).toList());
            // Clear and persist both sides so the same values are not migrated again on the next load.
            descriptor.clearCredentials();
            descriptor.save();
            GitHubPlugin.configuration().save();
        }
        // Step 2: same clear-and-persist pattern for the deprecated hook URL override.
        if (descriptor.getDeprecatedHookUrl() != null) {
            LOGGER.warn("Migration for old GitHub Plugin hook url started");
            GitHubPlugin.configuration().setOverrideHookUrl(true);
            GitHubPlugin.configuration().setHookUrl(descriptor.getDeprecatedHookUrl());
            descriptor.clearDeprecatedHookUrl();
            descriptor.save();
            GitHubPlugin.configuration().save();
        }
    }
}
public class StreamMessageImpl { /** * / * ( non - Javadoc ) * @ see net . timewalker . ffmq4 . common . message . AbstractMessage # unserializeBodyFrom ( net . timewalker . ffmq4 . utils . RawDataInputStream ) */ @ Override protected void unserializeBodyFrom ( RawDataBuffer in ) { } }
int size = in . readInt ( ) ; body . ensureCapacity ( size ) ; for ( int n = 0 ; n < size ; n ++ ) { Object value = in . readGeneric ( ) ; body . add ( value ) ; }
public class InstanceAdminClient { /** * Lists all instances in the given project . * < p > Sample code : * < pre > < code > * try ( InstanceAdminClient instanceAdminClient = InstanceAdminClient . create ( ) ) { * ProjectName parent = ProjectName . of ( " [ PROJECT ] " ) ; * for ( Instance element : instanceAdminClient . listInstances ( parent ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param parent Required . The name of the project for which a list of instances is requested . * Values are of the form ` projects / & lt ; project & gt ; ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ListInstancesPagedResponse listInstances ( ProjectName parent ) { } }
ListInstancesRequest request = ListInstancesRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . build ( ) ; return listInstances ( request ) ;
public class AbstractGoogleClientRequest {
    /**
     * Create a request suitable for use against this service.
     *
     * @param usingHead whether to issue a HEAD request instead of the configured method
     *                  (only valid when the configured method is GET)
     */
    private HttpRequest buildHttpRequest(boolean usingHead) throws IOException {
        // HEAD is only legal when no media upload is in progress and the base method is GET.
        Preconditions.checkArgument(uploader == null);
        Preconditions.checkArgument(!usingHead || requestMethod.equals(HttpMethods.GET));
        String requestMethodToUse = usingHead ? HttpMethods.HEAD : requestMethod;
        final HttpRequest httpRequest = getAbstractGoogleClient().getRequestFactory()
                .buildRequest(requestMethodToUse, buildHttpRequestUrl(), httpContent);
        new MethodOverride().intercept(httpRequest);
        httpRequest.setParser(getAbstractGoogleClient().getObjectParser());
        // custom methods may use POST with no content but require a Content-Length header
        if (httpContent == null && (requestMethod.equals(HttpMethods.POST)
                || requestMethod.equals(HttpMethods.PUT) || requestMethod.equals(HttpMethods.PATCH))) {
            httpRequest.setContent(new EmptyContent());
        }
        httpRequest.getHeaders().putAll(requestHeaders);
        if (!disableGZipContent) {
            httpRequest.setEncoding(new GZipEncoding());
        }
        // Wrap any pre-existing response interceptor so that, after it runs, non-success
        // responses are turned into exceptions when the request is configured to throw.
        final HttpResponseInterceptor responseInterceptor = httpRequest.getResponseInterceptor();
        httpRequest.setResponseInterceptor(new HttpResponseInterceptor() {
            public void interceptResponse(HttpResponse response) throws IOException {
                if (responseInterceptor != null) {
                    responseInterceptor.interceptResponse(response);
                }
                if (!response.isSuccessStatusCode() && httpRequest.getThrowExceptionOnExecuteError()) {
                    throw newExceptionOnError(response);
                }
            }
        });
        return httpRequest;
    }
}
public class HttpSupport { /** * Redirects to given controller , action " index " without any parameters . * @ param controllerClass controller class where to send redirect . * @ return { @ link HttpSupport . HttpBuilder } , to accept additional information . */ protected < T extends AppController > HttpBuilder redirect ( Class < T > controllerClass ) { } }
return redirect ( controllerClass , new HashMap ( ) ) ;
public class DescribeClusterSnapshotsRequest { /** * @ return */ public java . util . List < SnapshotSortingEntity > getSortingEntities ( ) { } }
if ( sortingEntities == null ) { sortingEntities = new com . amazonaws . internal . SdkInternalList < SnapshotSortingEntity > ( ) ; } return sortingEntities ;
public class LabelGenerator { /** * Makes sure that a given label will be unique amongst a set of other labels . */ static String generateUniqueLabel ( String label , Set < String > existingLabels ) { } }
StringBuilder newLabel = new StringBuilder ( label ) ; while ( existingLabels . contains ( newLabel . toString ( ) ) ) { newLabel . append ( POSTFIX ) ; } return newLabel . toString ( ) ;
public class AdUnit {
    /**
     * Sets the appliedLabelFrequencyCaps value for this AdUnit.
     *
     * @param appliedLabelFrequencyCaps The set of label frequency caps applied directly to this ad
     *        unit. There is a limit of 10 label frequency caps per ad unit.
     */
    public void setAppliedLabelFrequencyCaps(com.google.api.ads.admanager.axis.v201811.LabelFrequencyCap[] appliedLabelFrequencyCaps) {
        // Plain bean setter; the array reference is stored as-is (no defensive copy).
        this.appliedLabelFrequencyCaps = appliedLabelFrequencyCaps;
    }
}
public class MoveAnalysis {
    /**
     * Visits an assert statement. The asserted condition is only read (its value is
     * tested, never moved), so it is visited in non-consumed position.
     */
    @Override
    public void visitAssert(Stmt.Assert stmt, Boolean consumed) {
        // The incoming `consumed` flag is ignored: a condition expression is never consumed.
        visitExpression(stmt.getCondition(), false);
    }
}
public class GitHubProjectMojo { /** * Create client * Subclasses can override to do any custom client configuration * @ param hostname * @ return non - null client * @ throws MojoExecutionException */ protected GitHubClient createClient ( String hostname ) throws MojoExecutionException { } }
if ( ! hostname . contains ( "://" ) ) return new RateLimitedGitHubClient ( hostname ) ; try { URL hostUrl = new URL ( hostname ) ; return new RateLimitedGitHubClient ( hostUrl . getHost ( ) , hostUrl . getPort ( ) , hostUrl . getProtocol ( ) ) ; } catch ( MalformedURLException e ) { throw new MojoExecutionException ( "Could not parse host URL " + hostname , e ) ; }
public class DBFUtils { /** * Trims right spaces from string * @ param b _ array the string * @ return the string without right spaces */ public static byte [ ] trimRightSpaces ( byte [ ] b_array ) { } }
if ( b_array == null || b_array . length == 0 ) { return new byte [ 0 ] ; } int pos = getRightPos ( b_array ) ; int length = pos + 1 ; byte [ ] newBytes = new byte [ length ] ; System . arraycopy ( b_array , 0 , newBytes , 0 , length ) ; return newBytes ;
public class BeaconManager {
    /**
     * Binds an Android <code>Activity</code> or <code>Service</code> to the <code>BeaconService</code>. The
     * <code>Activity</code> or <code>Service</code> must implement the <code>beaconConsumer</code> interface so
     * that it can get a callback when the service is ready to use.
     *
     * @param consumer the <code>Activity</code> or <code>Service</code> that will receive the callback when the service is ready.
     */
    public void bind(@NonNull BeaconConsumer consumer) {
        // Without bluetooth LE (real or simulated) binding would be pointless, so bail out early.
        if (!isBleAvailableOrSimulated()) {
            LogManager.w(TAG, "Method invocation will be ignored.");
            return;
        }
        synchronized (consumers) {
            ConsumerInfo newConsumerInfo = new ConsumerInfo();
            // putIfAbsent returns the previous mapping, i.e. non-null when this consumer was already bound.
            ConsumerInfo alreadyBoundConsumerInfo = consumers.putIfAbsent(consumer, newConsumerInfo);
            if (alreadyBoundConsumerInfo != null) {
                LogManager.d(TAG, "This consumer is already bound");
            } else {
                LogManager.d(TAG, "This consumer is not bound. Binding now: %s", consumer);
                if (mScheduledScanJobsEnabled) {
                    // Scheduled-job mode: no service to bind; notify the consumer immediately.
                    LogManager.d(TAG, "Not starting beacon scanning service. Using scheduled jobs");
                    consumer.onBeaconServiceConnect();
                } else {
                    LogManager.d(TAG, "Binding to service");
                    Intent intent = new Intent(consumer.getApplicationContext(), BeaconService.class);
                    // On Android O+ with a configured foreground notification, only the first
                    // consumer starts the service in the foreground; later consumers just bind.
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && this.getForegroundServiceNotification() != null) {
                        if (isAnyConsumerBound()) {
                            LogManager.i(TAG, "Not starting foreground beacon scanning" + " service. A consumer is already bound, so it should be started");
                        } else {
                            LogManager.i(TAG, "Starting foreground beacon scanning service.");
                            mContext.startForegroundService(intent);
                        }
                    } else {
                    }
                    consumer.bindService(intent, newConsumerInfo.beaconServiceConnection, Context.BIND_AUTO_CREATE);
                }
                LogManager.d(TAG, "consumer count is now: %s", consumers.size());
            }
        }
    }
}
public class DepsFileParser { /** * Parses the given file and returns a list of dependency information that it * contained . * It uses the passed in fileContents instead of reading the file . * @ param filePath Path to the file to parse . * @ param fileContents The contents to parse . * @ return A list of DependencyInfo objects . */ public List < DependencyInfo > parseFile ( String filePath , String fileContents ) { } }
return parseFileReader ( filePath , new StringReader ( fileContents ) ) ;
public class SliceUtf8 { /** * Does the slice contain only 7 - bit ASCII characters . */ public static boolean isAscii ( Slice utf8 ) { } }
int length = utf8 . length ( ) ; int offset = 0 ; // Length rounded to 8 bytes int length8 = length & 0x7FFF_FFF8 ; for ( ; offset < length8 ; offset += 8 ) { if ( ( utf8 . getLongUnchecked ( offset ) & TOP_MASK64 ) != 0 ) { return false ; } } // Enough bytes left for 32 bits ? if ( offset + 4 < length ) { if ( ( utf8 . getIntUnchecked ( offset ) & TOP_MASK32 ) != 0 ) { return false ; } offset += 4 ; } // Do the rest one by one for ( ; offset < length ; offset ++ ) { if ( ( utf8 . getByteUnchecked ( offset ) & 0x80 ) != 0 ) { return false ; } } return true ;
public class Delivery {
    /**
     * Show/Create a DialogFragment on the provided FragmentManager with
     * the given tag.
     *
     * @see android.app.DialogFragment#show(android.app.FragmentManager, String)
     * @param manager the fragment manager used to add the Dialog into the UI
     * @param tag the tag for the dialog fragment in the manager
     */
    public void show(FragmentManager manager, String tag) {
        // Keep a handle on the freshly created fragment so it can be referenced later
        // (e.g. for dismissal), then display it under the supplied tag.
        mActiveMail = generateDialogFragment();
        mActiveMail.show(manager, tag);
    }
}
public class LinkUtil {
    /**
     * Determines the 'final URL' of a link to a resource by traversing along the 'redirect' properties.
     *
     * @param resource the addressed resource
     * @param trace the list of paths traversed before (to detect loops in redirects)
     * @return a 'final' path or URL; <code>null</code> if no different target found
     * @throws RedirectLoopException if a redirect loop has been detected
     */
    protected static String getFinalTarget(ResourceHandle resource, List<String> trace) throws RedirectLoopException {
        String finalTarget = null;
        if (resource.isValid()) {
            String path = resource.getPath();
            if (trace.contains(path)) {
                // throw an exception if a loop has been detected
                throw new RedirectLoopException(trace, path);
            }
            // search for redirects: the 'target' property wins over the 'redirect' property...
            String redirect = resource.getProperty(PROP_TARGET);
            if (StringUtils.isBlank(redirect)) {
                redirect = resource.getProperty(PROP_REDIRECT);
            }
            if (StringUtils.isBlank(redirect)) {
                // try to use the properties of a 'jcr:content' child instead of the target resource itself
                ResourceHandle contentResource = resource.getContentResource();
                if (resource != contentResource) {
                    redirect = contentResource.getProperty(PROP_TARGET);
                    if (StringUtils.isBlank(redirect)) {
                        redirect = contentResource.getProperty(PROP_REDIRECT);
                    }
                }
            }
            if (StringUtils.isNotBlank(redirect)) {
                // record this path before recursing so loops are detected on the next visit
                trace.add(path);
                finalTarget = redirect; // use the redirect target as the link URL
                if (!URL_PATTERN.matcher(finalTarget).matches()) {
                    // not an external URL: look forward if the redirect points to another resource
                    ResourceResolver resolver = resource.getResourceResolver();
                    Resource targetResource = resolver.getResource(finalTarget);
                    if (targetResource != null) {
                        String target = getFinalTarget(ResourceHandle.use(targetResource), trace);
                        if (StringUtils.isNotBlank(target)) {
                            finalTarget = target;
                        }
                    }
                }
            }
        }
        return finalTarget;
    }
}
public class PurandareFirstOrder {
    /**
     * Returns the index in the co-occurrence matrix for this word. If the word
     * was not previously assigned an index, this method adds one for it and
     * returns that index.
     *
     * NOTE(review): this is a double-checked pattern; it assumes termToIndex is a
     * concurrent map whose reads are safe outside the lock — confirm.
     */
    private final int getIndexFor(String word) {
        Integer index = termToIndex.get(word);
        if (index == null) {
            synchronized (this) {
                // recheck to see if the term was added while blocking
                index = termToIndex.get(word);
                // if another thread has not already added this word while the
                // current thread was blocking waiting on the lock, then add it.
                if (index == null) {
                    int i = wordIndexCounter++;
                    // Add a new counter for this term. Because the
                    // wordIndexCounter starts at zero, the next index will
                    // be the last index in the termCounts list.
                    termCounts.add(new AtomicInteger(0));
                    termToIndex.put(word, i);
                    return i; // avoid the auto-boxing to assign i to index
                }
            }
        }
        return index;
    }
}
public class CommercePriceListUserSegmentEntryRelLocalServiceUtil {
    /**
     * Deletes the commerce price list user segment entry rel from the database. Also notifies the
     * appropriate model listeners.
     *
     * @param commercePriceListUserSegmentEntryRel the commerce price list user segment entry rel
     * @return the commerce price list user segment entry rel that was removed
     * @throws PortalException
     */
    public static com.liferay.commerce.price.list.model.CommercePriceListUserSegmentEntryRel deleteCommercePriceListUserSegmentEntryRel(
            com.liferay.commerce.price.list.model.CommercePriceListUserSegmentEntryRel commercePriceListUserSegmentEntryRel)
            throws com.liferay.portal.kernel.exception.PortalException {
        // Static facade: delegate to the OSGi-resolved local service instance.
        return getService().deleteCommercePriceListUserSegmentEntryRel(commercePriceListUserSegmentEntryRel);
    }
}
public class AccuracyClassificationPulldownAction {
    /**
     * Fill the classification menu: two mutually-exclusive radio items ("Bug" / "Not Bug")
     * that classify the current warning, plus a menu listener that re-syncs the items
     * with the selected BugInstance each time the menu is shown.
     */
    private void fillMenu() {
        isBugItem = new MenuItem(menu, SWT.RADIO);
        isBugItem.setText("Bug");
        notBugItem = new MenuItem(menu, SWT.RADIO);
        notBugItem.setText("Not Bug");
        // Selecting "Bug" classifies the current warning as a real bug.
        isBugItem.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (bugInstance != null) {
                    classifyWarning(bugInstance, true);
                }
            }
        });
        // Selecting "Not Bug" classifies the current warning as a false positive.
        notBugItem.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (bugInstance != null) {
                    classifyWarning(bugInstance, false);
                }
            }
        });
        menu.addMenuListener(new MenuAdapter() {
            @Override
            public void menuShown(MenuEvent e) {
                // Before showing the menu, sync its contents
                // with the current BugInstance (if any)
                if (DEBUG) {
                    System.out.println("Synchronizing menu!");
                }
                syncMenu();
            }
        });
    }
}
public class StatsFactory { /** * Registers a StatsTemplateLookup object with the PMI service ( WebSphere internal use only ) . * @ param lookupClass An instance of { @ link com . ibm . wsspi . pmi . factory . StatsTemplateLookup } */ public static void registerStatsTemplateLookup ( StatsTemplateLookup lookupClass ) { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , new StringBuffer ( "registerStatsTemplateLookup: " ) . append ( lookupClass . getClass ( ) . getName ( ) ) . toString ( ) ) ; PerfModules . registerTemplateLookupClass ( lookupClass ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "registerStatsTemplateLookup" ) ;
public class JsonObject { /** * 添加结果数据 * @ param key * @ param value */ public JsonObject < V > addData ( String key , Object value ) { } }
if ( key == null ) { return this ; } if ( this . data == null ) { this . data = new HashMap < String , Object > ( ) ; } this . data . put ( key , value ) ; return this ;
public class RestClient { /** * This method will issue a HEAD to all of the assets in massive and returns the { @ link HttpURLConnection # getHeaderFields ( ) } . * @ return The { @ link HttpURLConnection # getHeaderFields ( ) } for the HEAD request * @ throws IOException * @ throws RequestFailureException if the response code is not OK */ public Map < String , List < String > > getAllAssetsMetadata ( ) throws IOException , RequestFailureException { } }
HttpURLConnection connection = createHeadConnection ( "/assets" ) ; testResponseCode ( connection ) ; return connection . getHeaderFields ( ) ;
public class TypeInference {
    /**
     * For functions that use template types, specialize the function type for the call target based
     * on the call-site specific arguments. Specifically, this enables inference to set the type of
     * any function literal parameters based on these inferred types.
     *
     * @return true when the specialization actually replaced at least one template type
     */
    private boolean inferTemplatedTypesForCall(Node n, FunctionType fnType, FlowScope scope) {
        ImmutableList<TemplateType> keys = fnType.getTemplateTypeMap().getTemplateKeys();
        if (keys.isEmpty()) {
            return false;
        }
        // Try to infer the template types from the call arguments; any key without an
        // inferred binding falls back to the unknown type.
        Map<TemplateType, JSType> rawInferrence = inferTemplateTypesFromParameters(fnType, n, scope);
        Map<TemplateType, JSType> inferred = Maps.newIdentityHashMap();
        for (TemplateType key : keys) {
            JSType type = rawInferrence.get(key);
            if (type == null) {
                type = unknownType;
            }
            inferred.put(key, type);
        }
        // Try to infer the template types using the type transformations; their results
        // override the parameter-based inference.
        Map<TemplateType, JSType> typeTransformations = evaluateTypeTransformations(keys, inferred, scope);
        if (typeTransformations != null) {
            inferred.putAll(typeTransformations);
        }
        // Replace all template types. If we couldn't find a replacement, we
        // replace it with UNKNOWN.
        TemplateTypeReplacer replacer = new TemplateTypeReplacer(registry, inferred);
        Node callTarget = n.getFirstChild();
        FunctionType replacementFnType = fnType.visit(replacer).toMaybeFunctionType();
        checkNotNull(replacementFnType);
        // Record the specialized function type on the call target and its return type on the call.
        callTarget.setJSType(replacementFnType);
        n.setJSType(replacementFnType.getReturnType());
        return replacer.madeChanges;
    }
}
public class CachedCertDAO {
    /**
     * Pass through Cert ID Lookup.
     *
     * @param trans the transaction context
     * @param id the certificate id to look up
     * @return the result of the underlying DAO read
     */
    public Result<List<CertDAO.Data>> readID(AuthzTrans trans, final String id) {
        // Direct delegation to the wrapped DAO; no cache interaction happens here.
        return dao().readID(trans, id);
    }
}
public class DirectoryScanner {
    /**
     * Process included file.
     *
     * @param name path of the file relative to the directory of the FileSet.
     * @param file included File.
     */
    private void accountForIncludedFile(String name, File file) {
        // Route the file through the shared bookkeeping, which slots it into the
        // included, excluded or deselected collection as appropriate.
        processIncluded(name, file, filesIncluded, filesExcluded, filesDeselected);
    }
}
public class DCEventSource {
    /**
     * This removes a specified listener for Invalidation listener. If it was not already registered
     * then this call is ignored.
     *
     * @param listener The listener to be removed.
     */
    public void removeListener(InvalidationListener listener) {
        synchronized (hsInvalidationListeners) {
            hsInvalidationListeners.remove(listener);
            // Refresh the cached count and mark the listener set dirty so dispatch
            // paths rebuild their snapshot before the next notification.
            invalidationListenerCount = hsInvalidationListeners.size();
            bUpdateInvalidationListener = true;
        }
    }
}
public class FeatureExpressionServiceImpl { /** * Fetch the specified expression from the cache or create it if necessary . * @ param expressionString the expression string * @ return the expression * @ throws ParseException oops */ private Expression getExpression ( String expressionString ) throws ParseException { } }
if ( ! expressionCache . containsKey ( expressionString ) ) { Expression expression ; expression = parser . parseExpression ( expressionString ) ; expressionCache . put ( expressionString , expression ) ; } return expressionCache . get ( expressionString ) ;
public class SQLRunner { /** * Adds the where . */ private void addWhere4ObjectPrint ( final ObjectPrint _print ) { } }
final SQLWhere where = sqlSelect . getWhere ( ) ; where . addCriteria ( 0 , "ID" , Comparison . EQUAL , String . valueOf ( _print . getInstance ( ) . getId ( ) ) , Connection . AND ) ;
public class CommonMpJwtFat {
    /**
     * Adds expectations for specific claims that we'll find in the JWTs that we test with.
     * We check to see that the various forms of injection retrieve the claims properly.
     * TODO - replace jwtTokenTools
     *
     * @param jwtTokenTools the parsed token whose claims drive the expected values
     * @param testAppClass - the test class invoked
     * @return - returns the expectations for specific claims
     * @throws Exception
     */
    public Expectations addClaimExpectations(JwtTokenForTest jwtTokenTools, String testAppClass) throws Exception {
        try {
            Expectations expectations = new Expectations();
            // Claim checks apply to all apps except non-RequestScoped ClaimInjection variants.
            if (!testAppClass.contains("ClaimInjection") || (testAppClass.contains("ClaimInjection") && testAppClass.contains("RequestScoped"))) {
                expectations.addExpectation(addApiOutputExpectation("getRawToken", MpJwtFatConstants.MP_JWT_TOKEN, null, jwtTokenTools.getJwtTokenString()));
                // Standard claims: expected values come straight from the parsed token payload.
                expectations.addExpectations(addApiOutputExpectation(jwtTokenTools, "getIssuer", MpJwtFatConstants.JWT_BUILDER_ISSUER, PayloadConstants.ISSUER));
                expectations.addExpectations(addApiOutputExpectation(jwtTokenTools, "getSubject", MpJwtFatConstants.JWT_BUILDER_SUBJECT, PayloadConstants.SUBJECT));
                expectations.addExpectations(addApiOutputExpectation(jwtTokenTools, "getTokenID", MpJwtFatConstants.JWT_BUILDER_JWTID, PayloadConstants.JWTID));
                expectations.addExpectations(addApiOutputExpectation(jwtTokenTools, "getExpirationTime", MpJwtFatConstants.JWT_BUILDER_EXPIRATION, PayloadConstants.EXPIRATION_TIME_IN_SECS));
                expectations.addExpectations(addApiOutputExpectation(jwtTokenTools, "getIssuedAtTime", MpJwtFatConstants.JWT_BUILDER_ISSUED_AT, PayloadConstants.ISSUED_AT_TIME_IN_SECS));
                expectations.addExpectations(addApiOutputExpectation(jwtTokenTools, "getAudience", MpJwtFatConstants.JWT_BUILDER_AUDIENCE, PayloadConstants.AUDIENCE));
                expectations.addExpectations(addApiOutputExpectation(jwtTokenTools, "getGroups", MpJwtFatConstants.PAYLOAD_GROUPS, "groups"));
                // we won't have a list of claims to check for ClaimInjection, we don't use the api to retrieve the claims and there is no injected claim that lists all claims...
                if (!testAppClass.contains("ClaimInjection")) {
                    for (String key : jwtTokenTools.getPayloadClaims()) {
                        expectations.addExpectations(addApiOutputExpectation(jwtTokenTools, "getClaim", MpJwtFatConstants.JWT_BUILDER_CLAIM, key));
                    }
                }
            }
            return expectations;
        } catch (Exception e) {
            // Log the failure for the FAT output, then rethrow so the test fails loudly.
            Log.info(thisClass, "addClaimExpectations", "Failed building expectations: " + e.getMessage());
            throw e;
        }
    }
}
public class MediaDetails { /** * Check whether the media seems to have changed since a saved version of it was used . We ignore changes in * free space because those probably just reflect history entries being added . * @ param originalMedia the media details when information about it was saved * @ return true if there have been detectable significant changes to the media since it was saved * @ throws IllegalArgumentException if the { @ link # hashKey ( ) } values of the media detail objects differ */ public boolean hasChanged ( MediaDetails originalMedia ) { } }
if ( ! hashKey ( ) . equals ( originalMedia . hashKey ( ) ) ) { throw new IllegalArgumentException ( "Can't compare media details with different hashKey values" ) ; } return playlistCount != originalMedia . playlistCount || trackCount != originalMedia . trackCount ;
public class SimplifySpanBuild {
    /**
     * append multi clickable SpecialUnit or String to first (Behind the existing BeforeContent).
     *
     * @param specialClickableUnit SpecialClickableUnit
     * @param specialUnitOrStrings Unit Or String
     * @return this builder, for call chaining
     */
    public SimplifySpanBuild appendMultiClickableToFirst(SpecialClickableUnit specialClickableUnit, Object... specialUnitOrStrings) {
        // `true` selects the prepend ("to first") path of the shared clickable-unit processor.
        processMultiClickableSpecialUnit(true, specialClickableUnit, specialUnitOrStrings);
        return this;
    }
}
public class JsonObjectExpectation { /** * Verifies that the specified key is present ( if expected to be present ) or is not present ( if not expected to be present ) in * the JSON data . */ private void verifyKeyExistenceMatchesExpectation ( JsonObject jsonDataToValidate ) { } }
boolean jsonDataContainsKey = jsonDataToValidate . containsKey ( validationKey ) ; if ( expCheckType == JsonCheckType . KEY_DOES_NOT_EXIST ) { assertFalse ( "The provided content contains a \"" + validationKey + "\" key but should not. The content to validate was: [" + jsonDataToValidate + "]." , jsonDataContainsKey ) ; } else { assertTrue ( "The provided content does not contain a \"" + validationKey + "\" key but should. The content to validate was: [" + jsonDataToValidate + "]." , jsonDataContainsKey ) ; }