signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class RottenTomatoesApi { /** * Retrieves current release DVDs
* @ param country Provides localized data for the selected country
* @ param page The selected page of in theaters movies
* @ param pageLimit The amount of movies in theaters to show per page
* @ return
* @ throws RottenTomatoesException */
public List < RTMovie > getCurrentReleaseDvds ( String country , int page , int pageLimit ) throws RottenTomatoesException { } } | properties . clear ( ) ; properties . put ( ApiBuilder . PROPERTY_URL , URL_CURRENT_RELEASE_DVDS ) ; properties . put ( ApiBuilder . PROPERTY_COUNTRY , ApiBuilder . validateCountry ( country ) ) ; properties . put ( ApiBuilder . PROPERTY_PAGE , ApiBuilder . validatePage ( page ) ) ; properties . put ( ApiBuilder . PROPERTY_PAGE_LIMIT , ApiBuilder . validatePageLimit ( pageLimit ) ) ; WrapperLists wrapper = response . getResponse ( WrapperLists . class , properties ) ; if ( wrapper != null && wrapper . getMovies ( ) != null ) { return wrapper . getMovies ( ) ; } else { return Collections . emptyList ( ) ; } |
public class ZooKeeperUtilityFactory { /** * Creates a { @ link ZooKeeperStateHandleStore } instance with the provided arguments .
* @ param zkStateHandleStorePath specifying the path in ZooKeeper to store the state handles to
* @ param stateStorageHelper storing the actual state data
* @ param < T > Type of the state to be stored
* @ return a ZooKeeperStateHandleStore instance
* @ throws Exception if ZooKeeper could not create the provided state handle store path in
* ZooKeeper */
public < T extends Serializable > ZooKeeperStateHandleStore < T > createZooKeeperStateHandleStore ( String zkStateHandleStorePath , RetrievableStateStorageHelper < T > stateStorageHelper ) throws Exception { } } | return ZooKeeperUtils . createZooKeeperStateHandleStore ( facade , zkStateHandleStorePath , stateStorageHelper ) ; |
public class ProxySettings { /** * Set credentials for authentication at the proxy server .
* This method is an alias of { @ link # setId ( String ) setId } { @ code
* ( id ) . } { @ link # setPassword ( String ) setPassword } { @ code
* ( password ) } .
* @ param id
* The ID .
* @ param password
* The password .
* @ return
* { @ code this } object . */
public ProxySettings setCredentials ( String id , String password ) { } } | return setId ( id ) . setPassword ( password ) ; |
public class SeleniumComponent { /** * Verify if the element loaded is a valid element for the class that is
* representing it . */
public void validateElementTag ( ) { } } | String errorMsg = String . format ( ErrorMessages . ERROR_INVALID_TAG_TO_CLASS , getElement ( ) . getTagName ( ) ) ; if ( ! isValidElementTag ( ) ) { throw new IllegalArgumentException ( errorMsg ) ; } |
public class Validate { /** * Checks if the given integer is NOT positive .
* @ param value The integer value to validate .
* @ throws ParameterException if the given integer value is < code > null < / code > or bigger than 0. */
public static void notPositive ( Integer value ) { } } | if ( ! validation ) return ; notNull ( value ) ; if ( value > 0 ) throw new ParameterException ( ErrorCode . POSITIVE ) ; |
public class CallableStatementProxy { /** * used only with java 7 , do not set @ Override */
public < T > T getObject ( int parameterIndex , Class < T > type ) throws SQLException { } } | // used reflection to make sure this work with Java 5 and 6
try { return ( T ) stat . getClass ( ) . getMethod ( "getObject" , new Class [ ] { int . class , Class . class } ) . invoke ( stat , new Object [ ] { parameterIndex , type } ) ; } catch ( Throwable t ) { ExceptionUtil . rethrowIfNecessary ( t ) ; if ( t instanceof InvocationTargetException && ( ( InvocationTargetException ) t ) . getTargetException ( ) instanceof SQLException ) throw ( SQLException ) ( ( InvocationTargetException ) t ) . getTargetException ( ) ; throw new PageRuntimeException ( Caster . toPageException ( t ) ) ; } |
public class Examples { /** * Get instruments for the desired exchange . */
public void getInstrumentsForExchange ( KiteConnect kiteConnect ) throws KiteException , IOException { } } | // Get instruments for an exchange .
List < Instrument > nseInstruments = kiteConnect . getInstruments ( "CDS" ) ; System . out . println ( nseInstruments . size ( ) ) ; |
public class JobMetrics { /** * Get a new { @ link GobblinMetrics } instance for a given job .
* @ param jobState the given { @ link JobState } instance
* @ param parentContext is the parent { @ link MetricContext }
* @ return a { @ link JobMetrics } instance */
public static JobMetrics get ( final JobState jobState , final MetricContext parentContext ) { } } | return ( JobMetrics ) GOBBLIN_METRICS_REGISTRY . getOrDefault ( name ( jobState ) , new Callable < GobblinMetrics > ( ) { @ Override public GobblinMetrics call ( ) throws Exception { return new JobMetrics ( jobState , parentContext ) ; } } ) ; |
public class PowerMock { /** * Convenience method for createMock followed by expectNew .
* @ param type The class that should be mocked .
* @ param arguments The constructor arguments .
* @ return A mock object of the same type as the mock .
* @ throws Exception */
public static synchronized < T > T createMockAndExpectNew ( Class < T > type , Object ... arguments ) throws Exception { } } | T mock = createMock ( type ) ; expectNew ( type , arguments ) . andReturn ( mock ) ; return mock ; |
public class LongestAliphaticChainDescriptor { /** * Calculate the count of atoms of the longest aliphatic chain in the supplied { @ link IAtomContainer } .
* The method require one parameter :
* if checkRingSyste is true the CDKConstant . ISINRING will be set
* @ param mol The { @ link IAtomContainer } for which this descriptor is to be calculated
* @ return the number of atoms in the longest aliphatic chain of this AtomContainer
* @ see # setParameters */
@ Override public DescriptorValue calculate ( IAtomContainer mol ) { } } | if ( checkRingSystem ) Cycles . markRingAtomsAndBonds ( mol ) ; IAtomContainer aliphaticParts = mol . getBuilder ( ) . newAtomContainer ( ) ; for ( IAtom atom : mol . atoms ( ) ) { if ( isAcyclicCarbon ( atom ) ) aliphaticParts . addAtom ( atom ) ; } for ( IBond bond : mol . bonds ( ) ) { if ( isAcyclicCarbon ( bond . getBegin ( ) ) && isAcyclicCarbon ( bond . getEnd ( ) ) ) aliphaticParts . addBond ( bond ) ; } int longest = 0 ; final int [ ] [ ] adjlist = GraphUtil . toAdjList ( aliphaticParts ) ; for ( int i = 0 ; i < adjlist . length ; i ++ ) { // atom deg > 1 can ' t find the longest chain
if ( adjlist [ i ] . length != 1 ) continue ; int length = getMaxDepth ( adjlist , i , - 1 ) ; if ( length > longest ) longest = length ; } return new DescriptorValue ( getSpecification ( ) , getParameterNames ( ) , getParameters ( ) , new IntegerResult ( longest ) , getDescriptorNames ( ) ) ; |
public class DatabaseMetaData { /** * { @ inheritDoc } */
public ResultSet getColumns ( final String catalog , final String schemaPattern , final String tableNamePattern , final String columnNamePattern ) throws SQLException { } } | return RowLists . rowList21 ( String . class , String . class , String . class , String . class , Integer . class , String . class , Integer . class , Integer . class , Integer . class , Integer . class , String . class , String . class , Integer . class , Integer . class , Integer . class , Integer . class , String . class , String . class , String . class , String . class , Short . class ) . withLabel ( 1 , "TABLE_CAT" ) . withLabel ( 2 , "TABLE_SCHEM" ) . withLabel ( 3 , "TABLE_NAME" ) . withLabel ( 4 , "COLUMN_NAME" ) . withLabel ( 5 , "DATA_TYPE" ) . withLabel ( 6 , "TYPE_NAME" ) . withLabel ( 7 , "BUFFER_LENGTH" ) . withLabel ( 8 , "DECIMAL_DIGITS" ) . withLabel ( 9 , "NUM_PREC_RADIX" ) . withLabel ( 10 , "NULLABLE" ) . withLabel ( 11 , "REMARKS" ) . withLabel ( 12 , "COLUMN_DEF" ) . withLabel ( 13 , "SQL_DATA_TYPE" ) . withLabel ( 14 , "SQL_DATETIME_SUB" ) . withLabel ( 15 , "CHAR_OCTET_LENGTH" ) . withLabel ( 16 , "ORDINAL_POSITION" ) . withLabel ( 17 , "IS_NULLABLE" ) . withLabel ( 18 , "SCOPE_CATLOG" ) . withLabel ( 19 , "SCOPE_SCHEMA" ) . withLabel ( 20 , "SCOPE_TABLE" ) . withLabel ( 21 , "SOURCE_DATA_TYPE" ) . resultSet ( ) ; |
public class BaseMessageEndpointFactory { /** * LI2110.56 - rewrote method to use new reuse parameter . */
public void returnInvocationHandler ( MessageEndpointBase endpoint , boolean reuse ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "MEF.returnInvocationHandler" ) ; // Use pool only if one was created .
if ( ivInvocationHandlerPool != null ) // d423445.1
{ if ( reuse ) { MessageEndpointBase . reset ( endpoint ) ; // F73236 PooledObject removed , call reset ( ) directly
ivInvocationHandlerPool . put ( endpoint ) ; } // f743-7046 start
// Notify any thread , but only 1 thread since release
// only occurred on one MessageEndpoint , that is blocked
// in createEndpoint method waiting for a release method
// to be called on some MessageEndpoint proxy .
synchronized ( ivStateLock ) { // d643869
// According to the JCA 1.5 spec section 12.5 the proxy
// instance can be pooled and reused by the same resource adapter .
// The proxy instance is not freed here and the count is
// not decremented . The discard method handles freeing
// proxy instances that are pooled and decrementing the count .
// - - ivNumberOfMessageEndpointsCreated ;
ivStateLock . notify ( ) ; } // f743-7046 end
} // d643869
// Decrement the ivNumberOfMessageEndpointsCreated and null the proxy
// because the MessageEndpointHandler was not returned to the pool
// and will be garbage collected by the JVM
if ( ivInvocationHandlerPool == null || ! reuse ) { synchronized ( ivStateLock ) { -- ivNumberOfMessageEndpointsCreated ; // endpointBase . ivProxy = null ;
} } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "MEF.returnInvocationHandler" ) ; // d643869 |
public class CommerceNotificationTemplateUtil { /** * Returns all the commerce notification templates that the user has permission to view where groupId = & # 63 ; and type = & # 63 ; and enabled = & # 63 ; .
* @ param groupId the group ID
* @ param type the type
* @ param enabled the enabled
* @ return the matching commerce notification templates that the user has permission to view */
public static List < CommerceNotificationTemplate > filterFindByG_T_E ( long groupId , String type , boolean enabled ) { } } | return getPersistence ( ) . filterFindByG_T_E ( groupId , type , enabled ) ; |
public class EnforcerRuleUtils { /** * Gets the pom model for this file .
* @ param pom the pom
* @ return the model
* @ throws IOException Signals that an I / O exception has occurred .
* @ throws XmlPullParserException the xml pull parser exception */
private Model readModel ( File pom ) throws IOException , XmlPullParserException { } } | Reader reader = ReaderFactory . newXmlReader ( pom ) ; MavenXpp3Reader xpp3 = new MavenXpp3Reader ( ) ; Model model = null ; try { model = xpp3 . read ( reader ) ; } finally { reader . close ( ) ; reader = null ; } return model ; |
public class TypefaceHelper { /** * Set the typeface to the all text views belong to the view group .
* @ param context the context .
* @ param layoutRes the layout resource id .
* @ param typefaceName typeface name .
* @ param style the typeface style .
* @ return the view . */
public View setTypeface ( Context context , @ LayoutRes int layoutRes , String typefaceName , int style ) { } } | return setTypeface ( context , layoutRes , null , typefaceName , 0 ) ; |
public class PrefMapFill { /** * Preference */
@ Override public void apply ( MapTile map ) { } } | final double tw = map . getTileWidth ( ) ; final double th = map . getTileHeight ( ) ; for ( int tx = 0 ; tx < map . getInTileWidth ( ) ; tx ++ ) { final double x = tx * tw ; for ( int ty = 0 ; ty < map . getInTileHeight ( ) ; ty ++ ) { final double y = ty * th ; final Tile tile = map . createTile ( sheet , number , x , y ) ; map . setTile ( tile ) ; } } |
public class ExternalChildResourcesCachedImpl { /** * Prepare for inline definition of a new external child resource ( along with the definition or update of parent resource ) .
* @ param name the name of the new external child resource
* @ param key the key
* @ return the child resource */
protected final FluentModelTImpl prepareInlineDefine ( String name , String key ) { } } | if ( find ( key ) != null ) { throw new IllegalArgumentException ( "A child resource ('" + childResourceName + "') with name (key) '" + name + " (" + key + ")' already exists" ) ; } FluentModelTImpl childResource = newChildResource ( name ) ; childResource . setPendingOperation ( ExternalChildResourceImpl . PendingOperation . ToBeCreated ) ; return super . prepareForFutureCommitOrPostRun ( childResource ) ; |
public class StringDictionaryEncoder { /** * Returns the size of the int arrays used by this class , it ' s 4 times the length of the arrays */
private int getSizeOfIntArrays ( ) { } } | return ( offsets . length + hashcodes . length + nexts . length + counts . length + indexStrides . length ) * 4 ; |
public class AbstractSendCommandJob { /** * Sends a command to the Gerrit server , returning the output from the command .
* @ param command the command .
* @ return the output from the command . */
@ Override public String sendCommandStr ( String command ) { } } | String str = null ; try { str = sendCommand2 ( command ) ; } catch ( Exception ex ) { logger . error ( "Could not run command " + command , ex ) ; } return str ; |
public class LottieCompositionFactory { /** * Return a LottieComposition for the specified json . */
@ WorkerThread public static LottieResult < LottieComposition > fromJsonReaderSync ( JsonReader reader , @ Nullable String cacheKey ) { } } | return fromJsonReaderSyncInternal ( reader , cacheKey , true ) ; |
public class NioUtils { /** * 将byte [ ] 数据写入到流中 */
public static void write ( byte [ ] data , OutputStream output ) throws IOException { } } | ByteArrayInputStream input = null ; try { input = new ByteArrayInputStream ( data ) ; copy ( input , output ) ; output . flush ( ) ; } finally { IOUtils . closeQuietly ( output ) ; } |
public class RedisGetSetClient { /** * Gets a content by a key .
* @ param key
* cache key
* @ return Promise with cached value ( or null , the returned Promise also can
* be null ) */
public final Promise get ( String key ) { } } | byte [ ] binaryKey = key . getBytes ( StandardCharsets . UTF_8 ) ; if ( client != null ) { return new Promise ( client . get ( binaryKey ) ) ; } if ( clusteredClient != null ) { return new Promise ( clusteredClient . get ( binaryKey ) ) ; } return Promise . resolve ( ) ; |
public class hqlLexer { /** * $ ANTLR start " ELSE " */
public final void mELSE ( ) throws RecognitionException { } } | try { int _type = ELSE ; int _channel = DEFAULT_TOKEN_CHANNEL ; // hql . g : 22:6 : ( ' else ' )
// hql . g : 22:8 : ' else '
{ match ( "else" ) ; if ( state . failed ) return ; } state . type = _type ; state . channel = _channel ; } finally { // do for sure before leaving
} |
public class GetDevicesInPlacementRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetDevicesInPlacementRequest getDevicesInPlacementRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getDevicesInPlacementRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getDevicesInPlacementRequest . getProjectName ( ) , PROJECTNAME_BINDING ) ; protocolMarshaller . marshall ( getDevicesInPlacementRequest . getPlacementName ( ) , PLACEMENTNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class AsmCounter { /** * flush temp counter data to all windows & assoc metrics . */
protected void doFlush ( ) { } } | long v ; synchronized ( unFlushed ) { v = unFlushed . getCount ( ) ; } for ( Counter counter : counterMap . values ( ) ) { counter . inc ( v ) ; } if ( MetricUtils . metricAccurateCal ) { for ( AsmMetric assocMetric : assocMetrics ) { assocMetric . updateDirectly ( v ) ; } } this . unFlushed . dec ( v ) ; |
public class NodeBuilder { /** * Inserts or replaces the provided key , copying all not - yet - visited keys prior to it into our buffer .
* @ param key key we are inserting / replacing
* @ return the NodeBuilder to retry the update against ( a child if we own the range being updated ,
* a parent if we do not - - we got here from an earlier key - - and we need to ascend back up ) ,
* or null if we finished the update in this node . */
NodeBuilder update ( Object key ) { } } | assert copyFrom != null ; int copyFromKeyEnd = getKeyEnd ( copyFrom ) ; int i = copyFromKeyPosition ; boolean found ; // exact key match ?
boolean owns = true ; // true iff this node ( or a child ) should contain the key
if ( i == copyFromKeyEnd ) { found = false ; } else { // this optimisation is for the common scenario of updating an existing row with the same columns / keys
// and simply avoids performing a binary search until we ' ve checked the proceeding key ;
// possibly we should disable this check if we determine that it fails more than a handful of times
// during any given builder use to get the best of both worlds
int c = - comparator . compare ( key , copyFrom [ i ] ) ; if ( c >= 0 ) { found = c == 0 ; } else { i = find ( comparator , key , copyFrom , i + 1 , copyFromKeyEnd ) ; found = i >= 0 ; if ( ! found ) i = - i - 1 ; } } if ( found ) { Object prev = copyFrom [ i ] ; Object next = updateFunction . apply ( prev , key ) ; // we aren ' t actually replacing anything , so leave our state intact and continue
if ( prev == next ) return null ; key = next ; } else if ( i == copyFromKeyEnd && compare ( comparator , key , upperBound ) >= 0 ) owns = false ; if ( isLeaf ( copyFrom ) ) { if ( owns ) { // copy keys from the original node up to prior to the found index
copyKeys ( i ) ; if ( found ) { // if found , we ' ve applied updateFunction already
replaceNextKey ( key ) ; } else { // if not found , we need to apply updateFunction still
key = updateFunction . apply ( key ) ; addNewKey ( key ) ; // handles splitting parent if necessary via ensureRoom
} // done , so return null
return null ; } else { // we don ' t want to copy anything if we ' re ascending and haven ' t copied anything previously ,
// as in this case we can return the original node . Leaving buildKeyPosition as 0 indicates
// to buildFromRange that it should return the original instead of building a new node
if ( buildKeyPosition > 0 ) copyKeys ( i ) ; } // if we don ' t own it , all we need to do is ensure we ' ve copied everything in this node
// ( which we have done , since not owning means pos > = keyEnd ) , ascend , and let Modifier . update
// retry against the parent node . The if / ascend after the else branch takes care of that .
} else { // branch
if ( found ) { copyKeys ( i ) ; replaceNextKey ( key ) ; copyChildren ( i + 1 ) ; return null ; } else if ( owns ) { copyKeys ( i ) ; copyChildren ( i ) ; // belongs to the range owned by this node , but not equal to any key in the node
// so descend into the owning child
Object newUpperBound = i < copyFromKeyEnd ? copyFrom [ i ] : upperBound ; Object [ ] descendInto = ( Object [ ] ) copyFrom [ copyFromKeyEnd + i ] ; ensureChild ( ) . reset ( descendInto , newUpperBound , updateFunction , comparator ) ; return child ; } else if ( buildKeyPosition > 0 || buildChildPosition > 0 ) { // ensure we ' ve copied all keys and children , but only if we ' ve already copied something .
// otherwise we want to return the original node
copyKeys ( copyFromKeyEnd ) ; copyChildren ( copyFromKeyEnd + 1 ) ; // since we know that there are exactly 1 more child nodes , than keys
} } return ascend ( ) ; |
public class MetaLocale { /** * Constructs a MetaLocale from a Java Locale object . */
public static MetaLocale fromJavaLocale ( java . util . Locale java ) { } } | // Some confusing cases can arise here based on the getLanguage ( ) method
// returning the deprecated language codes in a handful of cases . See
// MetaLocaleTest for test cases for these examples .
String language = java . getLanguage ( ) ; switch ( language ) { case "iw" : language = "he" ; break ; case "in" : language = "id" ; break ; case "ji" : language = "yi" ; break ; } return new MetaLocale ( language , java . getScript ( ) , java . getCountry ( ) , java . getVariant ( ) ) ; |
public class JMXSecurityMBeanServer { /** * { @ inheritDoc } */
@ Override public AttributeList getAttributes ( ObjectName name , String [ ] attributes ) throws InstanceNotFoundException , ReflectionException { } } | if ( ! isAuthorized ( ) ) { throwAuthzException ( ) ; } return super . getAttributes ( name , attributes ) ; |
public class LasWriterEachPoint { private byte [ ] getDouble ( double num ) { } } | doubleBb . clear ( ) ; doubleBb . putDouble ( num ) ; byte [ ] array = doubleBb . array ( ) ; return array ; |
public class ProxyIdpAuthnContextServiceImpl { /** * A Proxy - IdP may communicate with an IdP that uses different URI declarations for the same type of authentication
* methods , e . g . , the Swedish eID framework and eIDAS has different URI : s for the same type of authentication . This
* method will enable tranformation of URI : s and provide the possibility to match URI : s from different schemes .
* The default implementation just checks if the supplied { @ code uri } is part of the { @ code assuranceURIs } list . To
* implement different behaviour override this method .
* @ param context
* the request context
* @ param uri
* the URI to test
* @ param assuranceURIs
* IdP assurance certification URI : s
* @ return { @ code true } if there is a match , and { @ code false } otherwise */
protected boolean isSupported ( ProfileRequestContext < ? , ? > context , String uri , List < String > assuranceURIs ) { } } | return assuranceURIs . contains ( uri ) ; |
public class ResponseMessage { /** * @ see javax . servlet . http . HttpServletResponse # setIntHeader ( java . lang . String , int ) */
@ Override public void setIntHeader ( String hdr , int value ) { } } | if ( - 1 == value ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "setIntHeader(" + hdr + ", -1), removing header" ) ; } this . response . removeHeader ( hdr ) ; } else { this . response . setHeader ( hdr , Integer . toString ( value ) ) ; } |
public class ResourceUtils { /** * Finds the resource directory for an instance .
* @ param applicationFilesDirectory the application ' s directory
* @ param instance an instance
* @ return a non - null file ( that may not exist ) */
public static File findInstanceResourcesDirectory ( File applicationFilesDirectory , Instance instance ) { } } | return findInstanceResourcesDirectory ( applicationFilesDirectory , instance . getComponent ( ) ) ; |
public class ContentSpec { /** * Sets the path of the Brand Logo for the Content Specification .
* @ param brandLogo The path to the Brand Logo . */
public void setBrandLogo ( final String brandLogo ) { } } | if ( brandLogo == null && this . brandLogo == null ) { return ; } else if ( brandLogo == null ) { removeChild ( this . brandLogo ) ; this . brandLogo = null ; } else if ( this . brandLogo == null ) { this . brandLogo = new KeyValueNode < String > ( CommonConstants . CS_BRAND_LOGO_TITLE , brandLogo ) ; appendChild ( this . brandLogo , false ) ; } else { this . brandLogo . setValue ( brandLogo ) ; } |
public class AbstractThreadPoolService { /** * Set configurations . This methods sets / overrides both the default configurations and the per - pool configurations .
* @ param commonPrefixthe common prefix in the keys , for example , " threadPools . "
* @ param resolverthe configuration properties resolver */
protected void setConfigurations ( String commonPrefix , PropertySourcesPropertyResolver resolver ) { } } | setConfigurationsCommonPrefix ( commonPrefix ) ; setConfigurations ( resolver ) ; setDefaultCoreSize ( resolver . getProperty ( commonPrefix + "defaultCoreSize" , Integer . class , defaultCoreSize ) ) ; setDefaultMaxSize ( resolver . getProperty ( commonPrefix + "defaultMaxSize" , Integer . class , defaultMaxSize ) ) ; setDefaultKeepAliveSeconds ( resolver . getProperty ( commonPrefix + "defaultKeepAliveSeconds" , Long . class , defaultKeepAliveSeconds ) ) ; setDefaultQueueSize ( resolver . getProperty ( commonPrefix + "defaultQueueSize" , Integer . class , defaultQueueSize ) ) ; setDefaultAllowCoreThreadTimeout ( resolver . getProperty ( commonPrefix + "defaultAllowCoreThreadTimeout" , Boolean . class , defaultAllowCoreThreadTimeout ) ) ; setShutdownWaitSeconds ( resolver . getProperty ( commonPrefix + "shutdownWaitSeconds" , Long . class , shutdownWaitSeconds ) ) ; |
public class LensDistortionOps_F64 { /** * Creates a { @ link Point2Transform2 _ F32 } for converting pixels from original camera model into a new synthetic
* model . The scaling of the image can be adjusted to ensure certain visibility requirements .
* @ param type The type of adjustment it will apply to the transform
* @ param paramOriginal Camera model for the current image
* @ param paramDesired Desired camera model for the distorted image
* @ param desiredToOriginal If true then the transform ' s input is assumed to be pixels in the desired
* image and the output will be in original image , if false then the reverse transform
* is returned .
* @ param paramMod The modified camera model to meet the requested visibility requirements . Null if you don ' t want it .
* @ return The requested transform */
public static < O extends CameraPinhole , D extends CameraPinhole > Point2Transform2_F64 transformChangeModel ( AdjustmentType type , O paramOriginal , D paramDesired , boolean desiredToOriginal , D paramMod ) { } } | LensDistortionNarrowFOV original = LensDistortionFactory . narrow ( paramOriginal ) ; LensDistortionNarrowFOV desired = LensDistortionFactory . narrow ( paramDesired ) ; Point2Transform2_F64 ori_p_to_n = original . undistort_F64 ( true , false ) ; Point2Transform2_F64 des_n_to_p = desired . distort_F64 ( false , true ) ; Point2Transform2_F64 ori_to_des = new SequencePoint2Transform2_F64 ( ori_p_to_n , des_n_to_p ) ; Point2D_F64 work = new Point2D_F64 ( ) ; RectangleLength2D_F64 bound ; if ( type == AdjustmentType . FULL_VIEW ) { bound = DistortImageOps . boundBox_F64 ( paramOriginal . width , paramOriginal . height , new PointToPixelTransform_F64 ( ori_to_des ) , work ) ; } else if ( type == AdjustmentType . EXPAND ) { bound = LensDistortionOps_F64 . boundBoxInside ( paramOriginal . width , paramOriginal . height , new PointToPixelTransform_F64 ( ori_to_des ) , work ) ; // ensure there are no strips of black
LensDistortionOps_F64 . roundInside ( bound ) ; } else if ( type == AdjustmentType . CENTER ) { bound = LensDistortionOps_F64 . centerBoxInside ( paramOriginal . width , paramOriginal . height , new PointToPixelTransform_F64 ( ori_to_des ) , work ) ; } else if ( type == AdjustmentType . NONE ) { bound = new RectangleLength2D_F64 ( 0 , 0 , paramDesired . width , paramDesired . height ) ; } else { throw new IllegalArgumentException ( "Unsupported type " + type ) ; } double scaleX = bound . width / paramDesired . width ; double scaleY = bound . height / paramDesired . height ; double scale ; if ( type == AdjustmentType . FULL_VIEW ) { scale = Math . max ( scaleX , scaleY ) ; } else if ( type == AdjustmentType . EXPAND ) { scale = Math . min ( scaleX , scaleY ) ; } else if ( type == AdjustmentType . CENTER ) { scale = Math . max ( scaleX , scaleY ) ; } else { scale = 1.0 ; } double deltaX = ( bound . x0 + ( scaleX - scale ) * paramDesired . width / 2.0 ) ; double deltaY = ( bound . y0 + ( scaleY - scale ) * paramDesired . height / 2.0 ) ; // adjustment matrix
DMatrixRMaj A = new DMatrixRMaj ( 3 , 3 , true , scale , 0 , deltaX , 0 , scale , deltaY , 0 , 0 , 1 ) ; DMatrixRMaj A_inv = new DMatrixRMaj ( 3 , 3 ) ; if ( ! CommonOps_DDRM . invert ( A , A_inv ) ) { throw new RuntimeException ( "Failed to invert adjustment matrix. Probably bad." ) ; } if ( paramMod != null ) { PerspectiveOps . adjustIntrinsic ( paramDesired , A_inv , paramMod ) ; } if ( desiredToOriginal ) { Point2Transform2_F64 des_p_to_n = desired . undistort_F64 ( true , false ) ; Point2Transform2_F64 ori_n_to_p = original . distort_F64 ( false , true ) ; PointTransformHomography_F64 adjust = new PointTransformHomography_F64 ( A ) ; return new SequencePoint2Transform2_F64 ( adjust , des_p_to_n , ori_n_to_p ) ; } else { PointTransformHomography_F64 adjust = new PointTransformHomography_F64 ( A_inv ) ; return new SequencePoint2Transform2_F64 ( ori_to_des , adjust ) ; } |
public class MongoDBUtils { /** * Calculate m d5.
* @ param val
* the val
* @ return the string */
public static String calculateMD5 ( Object val ) { } } | MessageDigest md = null ; try { md = MessageDigest . getInstance ( "MD5" ) ; } catch ( NoSuchAlgorithmException e ) { logger . error ( "Unable to calculate MD5 for file, Caused By: " , e ) ; } md . update ( ( byte [ ] ) val ) ; byte [ ] digest = md . digest ( ) ; return DatatypeConverter . printHexBinary ( digest ) . toLowerCase ( ) ; |
public class PackageCache { /** * Method that gets invoked from load ( ) . It checks if the
* framework asset packages and current MDW build versions are the same .
* Otherwise logs a warning message . */
private static void validatePackageVersion ( List < Package > packages ) { } } | final String exceptions = ".*\\b(oracle|tibco|demo|hub)\\b.*" ; String version = ApplicationContext . getMdwVersion ( ) ; String mdwVersion = version . split ( "\\-" ) [ 0 ] ; List < Package > filteredPackages = packages . stream ( ) . filter ( e -> ! mdwVersion . equals ( e . getVersionString ( ) ) && e . getName ( ) . startsWith ( "com.centurylink.mdw" ) && ! e . getName ( ) . startsWith ( "com.centurylink.mdw.central" ) ) . collect ( Collectors . toList ( ) ) ; List < Package > obsoletePackages = filteredPackages . stream ( ) . filter ( p2 -> ! ( p2 . getName ( ) . matches ( exceptions ) ) ) . collect ( Collectors . toList ( ) ) ; if ( ! obsoletePackages . isEmpty ( ) ) { StringBuilder message = new StringBuilder ( ) ; message . append ( "\n****************************************\n" + "** WARNING: These asset packages do not match current build version " + mdwVersion + "\n" ) ; for ( Package p1 : obsoletePackages ) { message . append ( "** " + p1 . getLabel ( ) + "\n" ) ; } message . append ( "******************************************\n" ) ; logger . warn ( message . toString ( ) ) ; } |
public class GenReaderThread { /** * Create a number of threads to generate read traffics
* @ param conf
* @ param key directory of files to read
* @ param value checksum file locaiton
* @ return
* @ throws IOException */
@ Override public GenThread [ ] prepare ( JobConf conf , Text key , Text value ) throws IOException { } } | this . rtc = new RunTimeConstants ( ) ; super . prepare ( conf , key , value , rtc ) ; Path basePath = new Path ( key . toString ( ) ) ; LOG . info ( "base path is " + basePath ) ; Path checksumPath = null ; FileSystem fs = FileSystem . newInstance ( conf ) ; if ( value . toString ( ) . length ( ) != 0 ) { checksumPath = new Path ( value . toString ( ) ) ; } HashMap < String , Long > checksumMap = null ; boolean verifyChecksum = false ; if ( fs . exists ( checksumPath ) ) { LOG . info ( "checksum path is " + checksumPath ) ; verifyChecksum = true ; checksumMap = new HashMap < String , Long > ( ) ; SequenceFile . Reader reader = null ; try { reader = new SequenceFile . Reader ( fs , checksumPath , conf ) ; Writable dir = ( Writable ) ReflectionUtils . newInstance ( reader . getKeyClass ( ) , conf ) ; Writable checksum = ( Writable ) ReflectionUtils . newInstance ( reader . getValueClass ( ) , conf ) ; while ( reader . next ( dir , checksum ) ) { LOG . info ( "dir: " + dir . toString ( ) + " checksum: " + checksum ) ; checksumMap . put ( fs . makeQualified ( new Path ( dir . toString ( ) ) ) . toUri ( ) . getPath ( ) , Long . parseLong ( checksum . toString ( ) ) ) ; } } catch ( Exception e ) { LOG . error ( e ) ; throw new IOException ( e ) ; } finally { IOUtils . closeStream ( reader ) ; } } FileStatus [ ] baseDirs = fs . listStatus ( basePath ) ; if ( rtc . nthreads != baseDirs . length ) { throw new IOException ( "Number of directory under " + basePath + "(" + baseDirs . length + ") doesn't match number of threads " + "(" + rtc . nthreads + ")." ) ; } GenReaderThread [ ] threads = new GenReaderThread [ ( int ) rtc . nthreads ] ; for ( int i = 0 ; i < rtc . nthreads ; i ++ ) { long checksum = 0 ; if ( verifyChecksum ) { String basePathStr = baseDirs [ i ] . getPath ( ) . toUri ( ) . getPath ( ) ; checksum = checksumMap . 
get ( basePathStr ) ; } threads [ i ] = new GenReaderThread ( conf , baseDirs [ i ] . getPath ( ) , checksum , verifyChecksum , rtc ) ; } return threads ; |
public class VertxGenerator { /** * Overwrite this method to handle your custom type . This is needed especially when you have custom converters .
* @ param column the column definition
* @ param getter the getter name
* @ param columnType the type of the column
* @ param javaMemberName the java member name
* @ param out the JavaWriter
* @ return < code > true < / code > if the column was handled .
* @ see # generateToJson ( TableDefinition , JavaWriter , org . jooq . codegen . GeneratorStrategy . Mode ) */
protected boolean handleCustomTypeToJson ( TypedElementDefinition < ? > column , String getter , String columnType , String javaMemberName , JavaWriter out ) { } } | return false ; |
public class vrid { /** * Use this API to fetch filtered set of vrid resources .
* filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */
public static vrid [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } } | vrid obj = new vrid ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; vrid [ ] response = ( vrid [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class LinearSolver_DDRB_to_DDRM {
    /**
     * Converts B and X into block matrices and calls the block matrix solve routine,
     * then converts the block solution back into X.
     *
     * @param B A matrix of size m x p. Not modified.
     * @param X A matrix of size n x p where the solution is written to. Modified.
     */
    @Override
    public void solve(DMatrixRMaj B, DMatrixRMaj X) {
        // Size X to match the solution dimensions implied by A's columns.
        X.reshape(blockA.numCols, B.numCols);
        // Resize the reusable block buffers without preserving old contents.
        blockB.reshape(B.numRows, B.numCols, false);
        blockX.reshape(X.numRows, X.numCols, false);
        MatrixOps_DDRB.convert(B, blockB);
        alg.solve(blockB, blockX);
        // Copy the block-format solution back into the caller's row-major matrix.
        MatrixOps_DDRB.convert(blockX, X);
    }
}
public class DirectionUtil { /** * Get the direction closest to the specified direction , out of the directions in the possible
* list .
* @ param preferCW whether to prefer a clockwise match or a counter - clockwise match . */
public static int getClosest ( int direction , int [ ] possible , boolean preferCW ) { } } | // rotate a tick at a time , looking for matches
int first = direction ; int second = direction ; for ( int ii = 0 ; ii <= FINE_DIRECTION_COUNT / 2 ; ii ++ ) { if ( IntListUtil . contains ( possible , first ) ) { return first ; } if ( ii != 0 && IntListUtil . contains ( possible , second ) ) { return second ; } first = preferCW ? rotateCW ( first , 1 ) : rotateCCW ( first , 1 ) ; second = preferCW ? rotateCCW ( second , 1 ) : rotateCW ( second , 1 ) ; } return NONE ; |
public class AmazonPinpointClient { /** * Delete an ADM channel .
* @ param deleteAdmChannelRequest
* @ return Result of the DeleteAdmChannel operation returned by the service .
* @ throws BadRequestException
* 400 response
* @ throws InternalServerErrorException
* 500 response
* @ throws ForbiddenException
* 403 response
* @ throws NotFoundException
* 404 response
* @ throws MethodNotAllowedException
* 405 response
* @ throws TooManyRequestsException
* 429 response
* @ sample AmazonPinpoint . DeleteAdmChannel
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / pinpoint - 2016-12-01 / DeleteAdmChannel " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public DeleteAdmChannelResult deleteAdmChannel ( DeleteAdmChannelRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDeleteAdmChannel ( request ) ; |
public class RecurlyClient { /** * Get a particular { @ link Subscription } by it ' s UUID
* Returns information about a single subscription .
* @ param uuid UUID of the subscription to lookup
* @ return Subscription */
public Subscription getSubscription ( final String uuid ) { } } | if ( uuid == null || uuid . isEmpty ( ) ) throw new RuntimeException ( "uuid cannot be empty!" ) ; return doGET ( Subscriptions . SUBSCRIPTIONS_RESOURCE + "/" + uuid , Subscription . class ) ; |
public class RecordSetsInner { /** * Lists the record sets of a specified type in a DNS zone .
* @ param resourceGroupName The name of the resource group .
* @ param zoneName The name of the DNS zone ( without a terminating dot ) .
* @ param recordType The type of record sets to enumerate . Possible values include : ' A ' , ' AAAA ' , ' CNAME ' , ' MX ' , ' NS ' , ' PTR ' , ' SOA ' , ' SRV ' , ' TXT '
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the PagedList & lt ; RecordSetInner & gt ; object if successful . */
public PagedList < RecordSetInner > listByType ( final String resourceGroupName , final String zoneName , final RecordType recordType ) { } } | ServiceResponse < Page < RecordSetInner > > response = listByTypeSinglePageAsync ( resourceGroupName , zoneName , recordType ) . toBlocking ( ) . single ( ) ; return new PagedList < RecordSetInner > ( response . body ( ) ) { @ Override public Page < RecordSetInner > nextPage ( String nextPageLink ) { return listByTypeNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ; |
public class EditableTextArea { /** * Factory method for create a new { @ link EditableTextArea } object .
* @ param < T >
* the generic type of model object
* @ param id
* the id
* @ param model
* the model
* @ param labelModel
* the label model
* @ param modeContext
* the editable flag
* @ return the new created { @ link EditableTextArea } object . */
public static < T > EditableTextArea < T > of ( final String id , final IModel < T > model , final IModel < String > labelModel , final ModeContext modeContext ) { } } | final EditableTextArea < T > editableTextArea = new EditableTextArea < > ( id , model , labelModel , modeContext ) ; return editableTextArea ; |
public class ResourceProvider { /** * Checks if file exists in the default location .
* If that ' s not the case , file is looked up starting from the root .
* @ return determined file location */
protected String determineLocation ( String location ) { } } | if ( existsInDefaultLocation ( location ) ) { return defaultFolder ( ) + location ; } if ( ! existsInGivenLocation ( location ) ) { throw new InvalidResourceLocation ( "Unable to locate " + location + ". " + "File does not exist also in default location " + defaultLocation ( ) ) ; } return location ; |
public class InteractionCoordinator {
    /**
     * Finds the first procedure registered for the event's id that consumes the event's
     * resource and whose justification (if any) matches the event source, then executes
     * its command if the procedure's precondition is met.
     *
     * @param event the interaction event to dispatch
     */
    @Override
    public void onInteractionEvent(final InteractionEvent event) {
        QName id = event.getId();
        QName source = (QName) event.getSource();
        final Set<Procedure> collection = procedures.get(id);
        Procedure execution = null;
        if (collection != null) {
            for (Procedure consumer : collection) {
                // TODO: This isn't optimal (creation of new resource with every comparison)
                Resource<ResourceType> resource = new Resource<ResourceType>(id, ResourceType.Interaction);
                resource.setSource(source);
                // A null justification means "any source"; otherwise the source must match.
                boolean justified = consumer.getJustification() == null || source.equals(consumer.getJustification());
                if (consumer.doesConsume(resource) && justified) {
                    execution = consumer;
                    break;
                }
            }
        }
        if (null == execution) {
            Window.alert("No procedure for " + event);
            Log.warn("No procedure for " + event);
        } else if (execution.getPrecondition().isMet(getStatementContext(source))) // guarded
        {
            // NOTE(review): if the precondition is NOT met the event is silently dropped —
            // confirm this is intended (no log/alert on that path).
            try {
                execution.getCommand().execute(InteractionCoordinator.this.dialog, event.getPayload());
            } catch (Throwable e) {
                Log.error("Failed to execute procedure " + execution, e);
            }
        }
    }
}
public class AgentRoster { /** * Returns the key to use in the presenceMap for a fully qualified xmpp ID . The roster
* can contain any valid address format such us " domain / resource " , " user @ domain " or
* " user @ domain / resource " . If the roster contains an entry associated with the fully qualified
* xmpp ID then use the fully qualified xmpp ID as the key in presenceMap , otherwise use the
* bare address . Note : When the key in presenceMap is a fully qualified xmpp ID , the
* userPresences is useless since it will always contain one entry for the user .
* @ param user the fully qualified xmpp ID , e . g . jdoe @ example . com / Work .
* @ return the key to use in the presenceMap for the fully qualified xmpp ID . */
private Jid getPresenceMapKey ( Jid user ) { } } | Jid key = user ; if ( ! contains ( user ) ) { key = user . asEntityBareJidIfPossible ( ) ; } return key ; |
public class Setting { /** * Constructs a setting of { @ link Boolean } type , which is represented by a { @ link ToggleControl } .
* @ param description the title of this setting
* @ param property to be bound , saved / loaded and used for undo / redo
* @ return the constructed setting */
public static Setting of ( String description , BooleanProperty property ) { } } | return new Setting < > ( description , Field . ofBooleanType ( property ) . label ( description ) . render ( new ToggleControl ( ) ) , property ) ; |
public class Stream { /** * Zip together the " a " and " b " iterators until one of them runs out of values .
* Each pair of values is combined into a single value using the supplied zipFunction function .
* @ param a
* @ param b
* @ return */
public static < R > Stream < R > zip ( final ShortIterator a , final ShortIterator b , final ShortBiFunction < R > zipFunction ) { } } | return new IteratorStream < > ( new ObjIteratorEx < R > ( ) { @ Override public boolean hasNext ( ) { return a . hasNext ( ) && b . hasNext ( ) ; } @ Override public R next ( ) { return zipFunction . apply ( a . nextShort ( ) , b . nextShort ( ) ) ; } } ) ; |
public class HttpSerializer { /** * Formats the results of an HTTP data point storage request
* @ param results A map of results . The map will consist of :
* < ul > < li > success - ( long ) the number of successfully parsed datapoints < / li >
* < li > failed - ( long ) the number of datapoint parsing failures < / li >
* < li > errors - ( ArrayList & lt ; HashMap & lt ; String , Object & gt ; & gt ; ) an optional list of
* datapoints that had errors . The nested map has these fields :
* < ul > < li > error - ( String ) the error that occurred < / li >
* < li > datapoint - ( IncomingDatapoint ) the datapoint that generated the error
* < / li > < / ul > < / li > < / ul >
* @ return A ChannelBuffer object to pass on to the caller
* @ throws BadRequestException if the plugin has not implemented this method */
public ChannelBuffer formatPutV1 ( final Map < String , Object > results ) { } } | throw new BadRequestException ( HttpResponseStatus . NOT_IMPLEMENTED , "The requested API endpoint has not been implemented" , this . getClass ( ) . getCanonicalName ( ) + " has not implemented formatPutV1" ) ; |
public class BreakIterator {
    /**
     * Sets the iterator's current iteration position to be the last boundary position
     * preceding the specified position. (Whether the specified position is itself a
     * boundary position or not doesn't matter -- this function always moves the
     * iteration position to the last boundary before the specified position.) If the
     * specified position is the starting position, returns DONE.
     *
     * @param offset The character position to start searching from.
     * @return The position of the last boundary position preceding "offset" (whether or
     *         not "offset" itself is a boundary position), or DONE if "offset" is the
     *         starting offset of the iterator.
     */
    public int preceding(int offset) {
        // NOTE: This implementation is here solely because we can't add new
        // abstract methods to an existing class. There is almost ALWAYS a
        // better, faster way to do this.
        // Jump to the first boundary at or after offset, then walk backwards
        // until we land strictly before offset (or hit the start of text).
        int pos = following(offset);
        while (pos >= offset && pos != DONE)
            pos = previous();
        return pos;
    }
}
public class DelaunayTriangle { /** * The point counter - clockwise to given point */
public TriangulationPoint pointCCW ( TriangulationPoint point ) { } } | if ( point == points [ 0 ] ) { return points [ 1 ] ; } else if ( point == points [ 1 ] ) { return points [ 2 ] ; } else if ( point == points [ 2 ] ) { return points [ 0 ] ; } logger . error ( "point location error" ) ; throw new RuntimeException ( "[FIXME] point location error" ) ; |
public class CmsMessageBundleEditorModel {
    /** Initializes the editor states for the different modes, depending on the type of the opened file. */
    private void initEditorStates() {
        m_editorState = new HashMap<CmsMessageBundleEditorTypes.EditMode, EditorState>();
        List<TableProperty> cols = null;
        // PROPERTY and XML bundles share the same handling (intentional fall-through).
        switch (m_bundleType) {
            case PROPERTY:
            case XML:
                if (hasDescriptor()) {
                    // bundle descriptor is present, keys are not editable in default mode, maybe master mode is available
                    m_editorState.put(CmsMessageBundleEditorTypes.EditMode.DEFAULT, getDefaultState());
                    if (hasMasterMode()) {
                        // the bundle descriptor is editable
                        m_editorState.put(CmsMessageBundleEditorTypes.EditMode.MASTER, getMasterState());
                    }
                } else {
                    // no bundle descriptor given - implies no master mode
                    // NOTE(review): initial capacity 1 but two columns are added — harmless,
                    // the list simply grows.
                    cols = new ArrayList<TableProperty>(1);
                    cols.add(TableProperty.KEY);
                    cols.add(TableProperty.TRANSLATION);
                    m_editorState.put(CmsMessageBundleEditorTypes.EditMode.DEFAULT, new EditorState(cols, true));
                }
                break;
            case DESCRIPTOR:
                // Descriptor files expose key/description/default columns and are always editable.
                cols = new ArrayList<TableProperty>(3);
                cols.add(TableProperty.KEY);
                cols.add(TableProperty.DESCRIPTION);
                cols.add(TableProperty.DEFAULT);
                m_editorState.put(CmsMessageBundleEditorTypes.EditMode.DEFAULT, new EditorState(cols, true));
                break;
            default:
                throw new IllegalArgumentException();
        }
    }
}
public class CharOperation {
    /**
     * Answers the first index in the array for which the toBeFound array is a matching
     * subarray (following the case rule), starting at the index start. Answers -1 if no
     * match is found.
     * <p>
     * Examples: toBeFound = {'c'}, array = {'a','b','c','d'} =&gt; 2;
     * toBeFound = {'e'}, array = {'a','b','c','d'} =&gt; -1.
     *
     * @param toBeFound the subarray to search
     * @param array the array to be searched
     * @param isCaseSensitive flag to know if the matching should be case sensitive
     * @param start the starting index (inclusive)
     * @param end the end index (exclusive)
     * @return the first matching index, -1 otherwise
     * @throws NullPointerException if array is null or toBeFound is null
     */
    public static final int indexOf(final char[] toBeFound, final char[] array,
            final boolean isCaseSensitive, final int start, final int end) {
        final int searchedLength = end;
        final int patternLength = toBeFound.length;
        if (patternLength > searchedLength) {
            return -1;
        }
        if (patternLength == 0) {
            // An empty pattern matches trivially at index 0.
            return 0;
        }
        if (patternLength == searchedLength) {
            // Whole-array comparison: a match is only possible at index 0.
            for (int i = start; i < searchedLength; i++) {
                char lhs = array[i];
                char rhs = toBeFound[i];
                if (!isCaseSensitive) {
                    lhs = Character.toLowerCase(lhs);
                    rhs = Character.toLowerCase(rhs);
                }
                if (lhs != rhs) {
                    return -1;
                }
            }
            return 0;
        }
        // General case: scan candidate starting positions, matching the first pattern
        // character before comparing the remainder.
        final char firstWanted = isCaseSensitive ? toBeFound[0] : Character.toLowerCase(toBeFound[0]);
        final int lastCandidate = searchedLength - patternLength;
        outer:
        for (int i = start; i <= lastCandidate; i++) {
            final char current = isCaseSensitive ? array[i] : Character.toLowerCase(array[i]);
            if (current == firstWanted) {
                for (int j = 1; j < patternLength; j++) {
                    char lhs = array[i + j];
                    char rhs = toBeFound[j];
                    if (!isCaseSensitive) {
                        lhs = Character.toLowerCase(lhs);
                        rhs = Character.toLowerCase(rhs);
                    }
                    if (lhs != rhs) {
                        continue outer;
                    }
                }
                return i;
            }
        }
        return -1;
    }
}
public class BatchGetPartitionResult { /** * A list of the requested partitions .
* @ param partitions
* A list of the requested partitions . */
public void setPartitions ( java . util . Collection < Partition > partitions ) { } } | if ( partitions == null ) { this . partitions = null ; return ; } this . partitions = new java . util . ArrayList < Partition > ( partitions ) ; |
public class InputRenderer { /** * Adds ARIA attributes if the component is " role = combobox " .
* @ param context the { @ link FacesContext }
* @ param component the { @ link UIInput } component to add attributes for
* @ throws IOException if any error occurs writing the response
* @ see https : / / www . w3 . org / TR / wai - aria - practices / # combobox */
protected void renderARIACombobox ( FacesContext context , UIInput component ) throws IOException { } } | ResponseWriter writer = context . getResponseWriter ( ) ; writer . writeAttribute ( "role" , "combobox" , null ) ; writer . writeAttribute ( HTML . ARIA_HASPOPUP , "true" , null ) ; writer . writeAttribute ( HTML . ARIA_EXPANDED , "false" , null ) ; |
public class CmsSubscriptionCollector { /** * Returns the collector parameters . < p >
* @ param param the collector parameter
* @ return the collector parameters */
private Map < String , String > getParameters ( String param ) { } } | if ( CmsStringUtil . isNotEmpty ( param ) ) { return CmsStringUtil . splitAsMap ( param , "|" , "=" ) ; } return Collections . emptyMap ( ) ; |
public class MOEADD { /** * calculate the sum of fitnesses of solutions in the location subregion */
public double sumFitness ( int location ) { } } | double sum = 0 ; for ( int i = 0 ; i < populationSize ; i ++ ) { if ( subregionIdx [ location ] [ i ] == 1 ) { sum = sum + fitnessFunction ( population . get ( i ) , lambda [ location ] ) ; } } return sum ; |
public class IllegalPatternArgumentException { /** * Returns the formatted string { @ link IllegalPatternArgumentException # MESSAGE _ WITH _ NAME } with the given
* { @ code argumentName } and pattern which the argument must match .
* @ param argumentName
* the name of the passed argument
* @ param pattern
* Pattern , that a string or character sequence should correspond to
* @ return a formatted string of message with the given argument name */
private static String format ( @ Nullable final String argumentName , @ Nullable final Pattern pattern ) { } } | final String p = patternToString ( pattern ) ; return String . format ( MESSAGE_WITH_NAME , argumentName , p ) ; |
public class ChunkingOutputStream {
    /**
     * Writes one HTTP/1.1 chunk directly: a lowercase-hex size line, the data, and a
     * trailing CRLF — bypassing internal buffering. When chunking is disabled, only the
     * raw data is written. The stream is flushed afterwards.
     * NOTE(review): a length of 0 would emit a size line with no hex digits (malformed
     * chunk header) — presumably callers never pass 0; confirm.
     */
    protected void bypassWrite(byte[] b, int offset, int length) throws IOException {
        int i = 9;
        int chunk = length;
        _buf[10] = (byte) '\012'; // LF
        _buf[9] = (byte) '\015';  // CR
        // Render the chunk size as lowercase hex, filling _buf backwards from index 8.
        while (chunk > 0) {
            int d = chunk % 16;
            if (d <= 9)
                _buf[--i] = (byte) ('0' + d);
            else
                _buf[--i] = (byte) ('a' - 10 + d);
            chunk = chunk / 16;
        }
        // Emit "<hex-size>CRLF" (indices i..10 inclusive) only when chunking.
        if (_chunking)
            _out.write(_buf, i, 10 - i + 1);
        _out.write(b, offset, length);
        if (_chunking)
            _out.write(__CRLF, 0, __CRLF.length);
        _out.flush();
    }
}
public class MutableBigInteger {
    /**
     * Adds the contents of two MutableBigInteger objects. The result is placed within
     * this MutableBigInteger. The contents of the addend are not changed.
     */
    void add(MutableBigInteger addend) {
        int x = intLen;
        int y = addend.intLen;
        // Result is at least as long as the longer operand (carry handled below).
        int resultLen = (intLen > addend.intLen ? intLen : addend.intLen);
        // Reuse this value array when it is big enough; otherwise allocate.
        int[] result = (value.length < resultLen ? new int[resultLen] : value);
        int rstart = result.length - 1;
        long sum;
        long carry = 0;
        // Add common parts of both numbers
        while (x > 0 && y > 0) {
            x--;
            y--;
            sum = (value[x + offset] & LONG_MASK) + (addend.value[y + addend.offset] & LONG_MASK) + carry;
            result[rstart--] = (int) sum;
            carry = sum >>> 32;
        }
        // Add remainder of the longer number
        while (x > 0) {
            x--;
            // Early exit: no carry left and we are copying a word onto itself in place.
            if (carry == 0 && result == value && rstart == (x + offset))
                return;
            sum = (value[x + offset] & LONG_MASK) + carry;
            result[rstart--] = (int) sum;
            carry = sum >>> 32;
        }
        while (y > 0) {
            y--;
            sum = (addend.value[y + addend.offset] & LONG_MASK) + carry;
            result[rstart--] = (int) sum;
            carry = sum >>> 32;
        }
        if (carry > 0) { // Result must grow in length
            resultLen++;
            if (result.length < resultLen) {
                int temp[] = new int[resultLen];
                // Result one word longer from carry-out; copy low-order
                // bits into new result.
                System.arraycopy(result, 0, temp, 1, result.length);
                temp[0] = 1;
                result = temp;
            } else {
                result[rstart--] = 1;
            }
        }
        value = result;
        intLen = resultLen;
        offset = result.length - resultLen;
    }
}
public class GVRCollider { /** * Lookup a native pointer to a collider and return its Java object .
* @ param nativePointer native pointer to C + + Collider
* @ return Java GVRCollider object */
static GVRCollider lookup ( long nativePointer ) { } } | synchronized ( sColliders ) { WeakReference < GVRCollider > weakReference = sColliders . get ( nativePointer ) ; return weakReference == null ? null : weakReference . get ( ) ; } |
public class TraceSummary { /** * A list of availability zones for any zone corresponding to the trace segments .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setAvailabilityZones ( java . util . Collection ) } or { @ link # withAvailabilityZones ( java . util . Collection ) } if
* you want to override the existing values .
* @ param availabilityZones
* A list of availability zones for any zone corresponding to the trace segments .
* @ return Returns a reference to this object so that method calls can be chained together . */
public TraceSummary withAvailabilityZones ( AvailabilityZoneDetail ... availabilityZones ) { } } | if ( this . availabilityZones == null ) { setAvailabilityZones ( new java . util . ArrayList < AvailabilityZoneDetail > ( availabilityZones . length ) ) ; } for ( AvailabilityZoneDetail ele : availabilityZones ) { this . availabilityZones . add ( ele ) ; } return this ; |
public class Context {
    /**
     * Value Compaction Algorithm
     * http://json-ld.org/spec/latest/json-ld-api/#value-compaction
     *
     * @param activeProperty The Active Property
     * @param value The value to compact
     * @return The compacted value
     */
    public Object compactValue(String activeProperty, Map<String, Object> value) {
        // Count members to decide whether the value object can be compacted at all.
        int numberMembers = value.size();
        // @index does not count when the active property has an @index container.
        if (value.containsKey(JsonLdConsts.INDEX) && JsonLdConsts.INDEX.equals(this.getContainer(activeProperty))) {
            numberMembers--;
        }
        // More than @value/@id plus one qualifier: cannot compact.
        if (numberMembers > 2) {
            return value;
        }
        final String typeMapping = getTypeMapping(activeProperty);
        final String languageMapping = getLanguageMapping(activeProperty);
        if (value.containsKey(JsonLdConsts.ID)) {
            // 4.1)
            if (numberMembers == 1 && JsonLdConsts.ID.equals(typeMapping)) {
                return compactIri((String) value.get(JsonLdConsts.ID));
            }
            // 4.2)
            if (numberMembers == 1 && JsonLdConsts.VOCAB.equals(typeMapping)) {
                return compactIri((String) value.get(JsonLdConsts.ID), true);
            }
            // 4.3)
            return value;
        }
        final Object valueValue = value.get(JsonLdConsts.VALUE);
        // @type matches the term's type mapping: the bare value suffices.
        if (value.containsKey(JsonLdConsts.TYPE) && Obj.equals(value.get(JsonLdConsts.TYPE), typeMapping)) {
            return valueValue;
        }
        if (value.containsKey(JsonLdConsts.LANGUAGE)) {
            // TODO: SPEC: doesn't specify to check default language as well
            if (Obj.equals(value.get(JsonLdConsts.LANGUAGE), languageMapping)
                    || Obj.equals(value.get(JsonLdConsts.LANGUAGE), this.get(JsonLdConsts.LANGUAGE))) {
                return valueValue;
            }
        }
        // Only @value present and no language mapping can interfere: return the bare value.
        if (numberMembers == 1
                && (!(valueValue instanceof String) || !this.containsKey(JsonLdConsts.LANGUAGE)
                        || (termDefinitions.containsKey(activeProperty)
                                && getTermDefinition(activeProperty).containsKey(JsonLdConsts.LANGUAGE)
                                && languageMapping == null))) {
            return valueValue;
        }
        return value;
    }
}
public class AWSSimpleSystemsManagementClient { /** * Get detailed information about a particular Automation execution .
* @ param getAutomationExecutionRequest
* @ return Result of the GetAutomationExecution operation returned by the service .
* @ throws AutomationExecutionNotFoundException
* There is no automation execution information for the requested automation execution ID .
* @ throws InternalServerErrorException
* An error occurred on the server side .
* @ sample AWSSimpleSystemsManagement . GetAutomationExecution
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ssm - 2014-11-06 / GetAutomationExecution " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public GetAutomationExecutionResult getAutomationExecution ( GetAutomationExecutionRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeGetAutomationExecution ( request ) ; |
public class CPInstancePersistenceImpl { /** * Returns the last cp instance in the ordered set where groupId = & # 63 ; and status = & # 63 ; .
* @ param groupId the group ID
* @ param status the status
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching cp instance
* @ throws NoSuchCPInstanceException if a matching cp instance could not be found */
@ Override public CPInstance findByG_ST_Last ( long groupId , int status , OrderByComparator < CPInstance > orderByComparator ) throws NoSuchCPInstanceException { } } | CPInstance cpInstance = fetchByG_ST_Last ( groupId , status , orderByComparator ) ; if ( cpInstance != null ) { return cpInstance ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", status=" ) ; msg . append ( status ) ; msg . append ( "}" ) ; throw new NoSuchCPInstanceException ( msg . toString ( ) ) ; |
public class CmsRectangle { /** * Constrains a point to this rectangle . < p >
* If any of the coordinates of the point lie in the projection of this rectangle on the corresponding axis , that coordinate
* in the result will be unchanged , otherwise it will be either the maximum or the minimum depending on on which side the original
* coordinate is located .
* @ param point the point to constrain
* @ return the constrained point */
public CmsPoint constrain ( CmsPoint point ) { } } | return new CmsPoint ( constrainNum ( m_left , m_width , point . getX ( ) ) , constrainNum ( m_top , m_height , point . getY ( ) ) ) ; |
public class tmtrafficpolicy_binding { /** * Use this API to fetch tmtrafficpolicy _ binding resource of given name . */
public static tmtrafficpolicy_binding get ( nitro_service service , String name ) throws Exception { } } | tmtrafficpolicy_binding obj = new tmtrafficpolicy_binding ( ) ; obj . set_name ( name ) ; tmtrafficpolicy_binding response = ( tmtrafficpolicy_binding ) obj . get_resource ( service ) ; return response ; |
public class RowHasher { /** * This method can be called to register table optimizations before initializing Fluo . This will
* register { @ link Optimizer } with
* { @ link TableOptimizations # registerOptimization ( SimpleConfiguration , String , Class ) } . See the
* project level documentation for an example .
* @ param fluoConfig The config that will be used to initialize Fluo
* @ param prefix The prefix used for your Row Hasher . If you have a single instance , could call
* { @ link RowHasher # getPrefix ( ) } .
* @ param numTablets Initial number of tablet to create . */
public static void configure ( FluoConfiguration fluoConfig , String prefix , int numTablets ) { } } | fluoConfig . getAppConfiguration ( ) . setProperty ( PREFIX + prefix + ".numTablets" , numTablets ) ; TableOptimizations . registerOptimization ( fluoConfig . getAppConfiguration ( ) , prefix , Optimizer . class ) ; |
public class NERServer {
    /**
     * Starts this server on the specified port. The classifier used can be either a
     * default one stored in the jar file from which this code is invoked, or specified
     * as a filename or as another classifier resource name, which must correspond to the
     * name of a resource in the /classifiers/ directory of the jar file.
     *
     * Usage: {@code java edu.stanford.nlp.ie.NERServer [-loadClassifier file|-loadJarClassifier resource|-client] -port portNumber}
     *
     * @param args Command-line arguments (described above)
     * @throws Exception If file or Java class problems with serialized classifier
     */
    @SuppressWarnings({"StringEqualsEmptyString"})
    public static void main(String[] args) throws Exception {
        Properties props = StringUtils.argsToProperties(args);
        String loadFile = props.getProperty("loadClassifier");
        String loadJarFile = props.getProperty("loadJarClassifier");
        String client = props.getProperty("client");
        String portStr = props.getProperty("port");
        props.remove("port"); // so later code doesn't complain
        // A port is mandatory in both server and client mode.
        if (portStr == null || portStr.equals("")) {
            System.err.println(USAGE);
            return;
        }
        String charset = "utf-8";
        String encoding = props.getProperty("encoding");
        if (encoding != null && !"".equals(encoding)) {
            charset = encoding;
        }
        int port;
        try {
            port = Integer.parseInt(portStr);
        } catch (NumberFormatException e) {
            System.err.println("Non-numerical port");
            System.err.println(USAGE);
            return;
        }
        // default output format for if no output format is specified
        if (props.getProperty("outputFormat") == null) {
            props.setProperty("outputFormat", "slashTags");
        }
        if (client != null && !client.equals("")) {
            // run a test client for illustration/testing
            String host = props.getProperty("host");
            NERClient.communicateWithNERServer(host, port, charset);
        } else {
            // Server mode: pick the classifier source in priority order
            // (file, then jar resource, then the built-in default).
            AbstractSequenceClassifier asc;
            if (loadFile != null && !loadFile.equals("")) {
                asc = CRFClassifier.getClassifier(loadFile, props);
            } else if (loadJarFile != null && !loadJarFile.equals("")) {
                asc = CRFClassifier.getJarClassifier(loadJarFile, props);
            } else {
                asc = CRFClassifier.getDefaultClassifier(props);
            }
            new NERServer(port, asc, charset).run();
        }
    }
}
public class CommercePriceListUserSegmentEntryRelUtil { /** * Returns the commerce price list user segment entry rel where commercePriceListId = & # 63 ; and commerceUserSegmentEntryId = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache .
* @ param commercePriceListId the commerce price list ID
* @ param commerceUserSegmentEntryId the commerce user segment entry ID
* @ param retrieveFromCache whether to retrieve from the finder cache
* @ return the matching commerce price list user segment entry rel , or < code > null < / code > if a matching commerce price list user segment entry rel could not be found */
public static CommercePriceListUserSegmentEntryRel fetchByC_C ( long commercePriceListId , long commerceUserSegmentEntryId , boolean retrieveFromCache ) { } } | return getPersistence ( ) . fetchByC_C ( commercePriceListId , commerceUserSegmentEntryId , retrieveFromCache ) ; |
public class ModularParser { /** * Building a ContentElement , this funciton is calles by all the other
* parseContentElement ( . . ) functions */
private ContentElement parseContentElement ( SpanManager sm , ContentElementParsingParameters cepp , LinkedList < Span > lineSpans , ContentElement result ) { } } | List < Link > localLinks = new ArrayList < Link > ( ) ; List < Template > localTemplates = new ArrayList < Template > ( ) ; List < Span > boldSpans = new ArrayList < Span > ( ) ; List < Span > italicSpans = new ArrayList < Span > ( ) ; sm . manageList ( boldSpans ) ; sm . manageList ( italicSpans ) ; List < Span > managedSpans = new ArrayList < Span > ( ) ; sm . manageList ( managedSpans ) ; Span contentElementRange = new Span ( lineSpans . getFirst ( ) . getStart ( ) , lineSpans . getLast ( ) . getEnd ( ) ) . trim ( sm ) ; managedSpans . add ( contentElementRange ) ; // set the SrcSpan
if ( calculateSrcSpans ) { result . setSrcSpan ( new SrcSpan ( sm . getSrcPos ( contentElementRange . getStart ( ) ) , sm . getSrcPos ( contentElementRange . getEnd ( ) ) ) ) ; } sm . manageList ( lineSpans ) ; while ( ! lineSpans . isEmpty ( ) ) { Span line = lineSpans . getFirst ( ) ; parseBoldAndItalicSpans ( sm , line , boldSpans , italicSpans ) ; // External links
parseExternalLinks ( sm , line , "http://" , managedSpans , localLinks , result ) ; parseExternalLinks ( sm , line , "https://" , managedSpans , localLinks , result ) ; parseExternalLinks ( sm , line , "ftp://" , managedSpans , localLinks , result ) ; parseExternalLinks ( sm , line , "mailto:" , managedSpans , localLinks , result ) ; // end of linewhise opperations
lineSpans . removeFirst ( ) ; } sm . removeManagedList ( lineSpans ) ; // Links
int i ; i = 0 ; while ( i < cepp . linkSpans . size ( ) ) { if ( contentElementRange . hits ( cepp . linkSpans . get ( i ) ) ) { Span linkSpan = cepp . linkSpans . remove ( i ) ; managedSpans . add ( linkSpan ) ; Link l = cepp . links . remove ( i ) . setHomeElement ( result ) ; localLinks . add ( l ) ; if ( ! showImageText && l . getType ( ) == Link . type . IMAGE ) { // deletes the Image Text from the ContentElement Text .
sm . delete ( linkSpan ) ; } } else { i ++ ; } } // Templates
i = 0 ; while ( i < cepp . templateSpans . size ( ) ) { Span ts = cepp . templateSpans . get ( i ) ; if ( contentElementRange . hits ( ts ) ) { ResolvedTemplate rt = cepp . templates . remove ( i ) ; if ( rt . getPostParseReplacement ( ) != null ) { sm . replace ( ts , rt . getPostParseReplacement ( ) ) ; } cepp . templateSpans . remove ( i ) ; Object parsedObject = rt . getParsedObject ( ) ; if ( parsedObject != null ) { managedSpans . add ( ts ) ; Class parsedObjectClass = parsedObject . getClass ( ) ; if ( parsedObjectClass == Template . class ) { localTemplates . add ( ( Template ) parsedObject ) ; } else if ( parsedObjectClass == Link . class ) { localLinks . add ( ( ( Link ) parsedObject ) . setHomeElement ( result ) ) ; } else { localTemplates . add ( rt . getTemplate ( ) ) ; } } } else { i ++ ; } } // HTML / XML Tags
i = 0 ; List < Span > tags = new ArrayList < Span > ( ) ; while ( i < cepp . tagSpans . size ( ) ) { Span s = cepp . tagSpans . get ( i ) ; if ( contentElementRange . hits ( s ) ) { cepp . tagSpans . remove ( i ) ; if ( deleteTags ) { sm . delete ( s ) ; } else { tags . add ( s ) ; managedSpans . add ( s ) ; } } else { i ++ ; } } // noWiki
i = 0 ; List < Span > localNoWikiSpans = new ArrayList < Span > ( ) ; while ( i < cepp . noWikiSpans . size ( ) ) { Span s = cepp . noWikiSpans . get ( i ) ; if ( contentElementRange . hits ( s ) ) { cepp . noWikiSpans . remove ( i ) ; sm . replace ( s , cepp . noWikiStrings . remove ( i ) ) ; localNoWikiSpans . add ( s ) ; managedSpans . add ( s ) ; } else { i ++ ; } } // MATH Tags
i = 0 ; List < Span > mathSpans = new ArrayList < Span > ( ) ; while ( i < cepp . mathSpans . size ( ) ) { Span s = cepp . mathSpans . get ( i ) ; if ( contentElementRange . hits ( s ) ) { cepp . mathSpans . remove ( i ) ; if ( showMathTagContent ) { mathSpans . add ( s ) ; managedSpans . add ( s ) ; sm . replace ( s , cepp . mathStrings . remove ( i ) ) ; } else { sm . delete ( s ) ; } } else { i ++ ; } } result . setText ( sm . substring ( contentElementRange ) ) ; // managed spans must be removed here and not earlier , because every
// change in the SpanManager affects the Spans !
sm . removeManagedList ( boldSpans ) ; sm . removeManagedList ( italicSpans ) ; sm . removeManagedList ( managedSpans ) ; // contentElementRange ist auch noch in managedSpans ! ! ! deswegen :
final int adjust = - contentElementRange . getStart ( ) ; for ( Span s : boldSpans ) { s . adjust ( adjust ) ; } for ( Span s : italicSpans ) { s . adjust ( adjust ) ; } for ( Span s : managedSpans ) { s . adjust ( adjust ) ; } result . setFormatSpans ( FormatType . BOLD , boldSpans ) ; result . setFormatSpans ( FormatType . ITALIC , italicSpans ) ; result . setFormatSpans ( FormatType . TAG , tags ) ; result . setFormatSpans ( FormatType . MATH , mathSpans ) ; result . setFormatSpans ( FormatType . NOWIKI , localNoWikiSpans ) ; result . setLinks ( sortLinks ( localLinks ) ) ; result . setTemplates ( sortTemplates ( localTemplates ) ) ; return result ; |
public class BitfinexApiCallbackListeners { /** * registers listener for candlesticks info updates
* @ param listener of event
* @ return hook of this listener */
public Closeable onCandlesticksEvent ( final BiConsumer < BitfinexCandlestickSymbol , Collection < BitfinexCandle > > listener ) { } } | candlesConsumers . offer ( listener ) ; return ( ) -> candlesConsumers . remove ( listener ) ; |
public class SerializerBase { /** * Report the CDATA trace event
* @ param chars content of CDATA
* @ param start starting index of characters to output
* @ param length number of characters to output */
protected void fireCDATAEvent ( char [ ] chars , int start , int length ) throws org . xml . sax . SAXException { } } | if ( m_tracer != null ) { flushMyWriter ( ) ; m_tracer . fireGenerateEvent ( SerializerTrace . EVENTTYPE_CDATA , chars , start , length ) ; } |
public class JavaParser { /** * src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 429:1 : interfaceMethodDeclaratorRest : formalParameters ( ' [ ' ' ] ' ) * ( ' throws ' qualifiedNameList ) ? ' ; ' ; */
public final void interfaceMethodDeclaratorRest ( ) throws RecognitionException { } } | int interfaceMethodDeclaratorRest_StartIndex = input . index ( ) ; try { if ( state . backtracking > 0 && alreadyParsedRule ( input , 33 ) ) { return ; } // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 430:5 : ( formalParameters ( ' [ ' ' ] ' ) * ( ' throws ' qualifiedNameList ) ? ' ; ' )
// src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 430:7 : formalParameters ( ' [ ' ' ] ' ) * ( ' throws ' qualifiedNameList ) ? ' ; '
{ pushFollow ( FOLLOW_formalParameters_in_interfaceMethodDeclaratorRest1162 ) ; formalParameters ( ) ; state . _fsp -- ; if ( state . failed ) return ; // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 430:24 : ( ' [ ' ' ] ' ) *
loop48 : while ( true ) { int alt48 = 2 ; int LA48_0 = input . LA ( 1 ) ; if ( ( LA48_0 == 59 ) ) { alt48 = 1 ; } switch ( alt48 ) { case 1 : // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 430:25 : ' [ ' ' ] '
{ match ( input , 59 , FOLLOW_59_in_interfaceMethodDeclaratorRest1165 ) ; if ( state . failed ) return ; match ( input , 60 , FOLLOW_60_in_interfaceMethodDeclaratorRest1167 ) ; if ( state . failed ) return ; } break ; default : break loop48 ; } } // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 430:35 : ( ' throws ' qualifiedNameList ) ?
int alt49 = 2 ; int LA49_0 = input . LA ( 1 ) ; if ( ( LA49_0 == 113 ) ) { alt49 = 1 ; } switch ( alt49 ) { case 1 : // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 430:36 : ' throws ' qualifiedNameList
{ match ( input , 113 , FOLLOW_113_in_interfaceMethodDeclaratorRest1172 ) ; if ( state . failed ) return ; pushFollow ( FOLLOW_qualifiedNameList_in_interfaceMethodDeclaratorRest1174 ) ; qualifiedNameList ( ) ; state . _fsp -- ; if ( state . failed ) return ; } break ; } match ( input , 52 , FOLLOW_52_in_interfaceMethodDeclaratorRest1178 ) ; if ( state . failed ) return ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving
if ( state . backtracking > 0 ) { memoize ( input , 33 , interfaceMethodDeclaratorRest_StartIndex ) ; } } |
public class JDefaultBase { /** * Fetches a value
* @ param key of the value to find
* @ return the value converted to type String */
protected static String fetchString ( String key ) { } } | List < String > stringList = ( List < String > ) fetchObject ( key ) ; return stringList . get ( RandomUtils . nextInt ( stringList . size ( ) ) ) ; |
public class MatrixIO { /** * Reads a matrix in which has been encoded using a Column Space Value ( CSV )
* file format . For a description of the format see { @ link MatrixIO # loadCSV ( String , boolean ) } .
* @ param fileName The file being loaded .
* @ param numRows number of rows in the matrix .
* @ param numCols number of columns in the matrix .
* @ return DMatrixRMaj
* @ throws IOException */
public static DMatrixRMaj loadCSV ( String fileName , int numRows , int numCols ) throws IOException { } } | FileInputStream fileStream = new FileInputStream ( fileName ) ; ReadMatrixCsv csv = new ReadMatrixCsv ( fileStream ) ; DMatrixRMaj ret = csv . readDDRM ( numRows , numCols ) ; fileStream . close ( ) ; return ret ; |
public class CPDisplayLayoutUtil { /** * Returns the last cp display layout in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; .
* @ param uuid the uuid
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching cp display layout , or < code > null < / code > if a matching cp display layout could not be found */
public static CPDisplayLayout fetchByUuid_C_Last ( String uuid , long companyId , OrderByComparator < CPDisplayLayout > orderByComparator ) { } } | return getPersistence ( ) . fetchByUuid_C_Last ( uuid , companyId , orderByComparator ) ; |
public class JmsSessionImpl { /** * This method is called by a JmsMsgConsumer in order to remove itself from
* the list of consumers held by the Session .
* @ param JmsMsgConsumer The Consumer which is calling the method . */
void removeConsumer ( JmsMsgConsumerImpl consumer ) throws JMSException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "removeConsumer" , consumer ) ; // if DUPS _ OK _ ACKNOWLEDGE is in use , commit any messages delivered in the
// last incomplete batch ( if the session is closing it will already have
// done this and the count of uncommitted messages will be zero )
if ( ( acknowledgeMode == Session . DUPS_OK_ACKNOWLEDGE ) && ( uncommittedReceiveCount > 0 ) ) { commitTransaction ( ) ; } syncConsumers . remove ( consumer ) ; asyncConsumers . remove ( consumer ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "removeConsumer" ) ; |
public class WSRdbManagedConnectionImpl { /** * Updates the value of the readOnly property .
* @ param readOnly the new isReadOnly value . */
public final void setReadOnly ( boolean isReadOnly ) throws SQLException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "Set readOnly to " + isReadOnly ) ; sqlConn . setReadOnly ( isReadOnly ) ; connectionPropertyChanged = true ; |
public class DefaultDataEditorWidget { /** * Returns only the detail form widget */
@ Override public Widget createDetailWidget ( ) { } } | return new AbstractWidget ( ) { @ Override public void onAboutToShow ( ) { DefaultDataEditorWidget . this . onAboutToShow ( ) ; } @ Override public void onAboutToHide ( ) { DefaultDataEditorWidget . this . onAboutToHide ( ) ; } public JComponent getComponent ( ) { return getDetailForm ( ) . getControl ( ) ; } @ Override public List < ? extends AbstractCommand > getCommands ( ) { return Arrays . asList ( getDetailForm ( ) . getCommitCommand ( ) ) ; } @ Override public String getId ( ) { return DefaultDataEditorWidget . this . getId ( ) + "." + getDetailForm ( ) . getId ( ) ; } } ; |
public class GVRLight { /** * Bind a { @ code float } to the shader uniform { @ code key } .
* Throws an exception of the key is not found .
* @ param key Name of the shader uniform
* @ param value New data */
public void setFloat ( String key , float value ) { } } | checkKeyIsUniform ( key ) ; checkFloatNotNaNOrInfinity ( "value" , value ) ; NativeLight . setFloat ( getNative ( ) , key , value ) ; |
public class GuardRail { /** * Release acquired permits with known result . Since there is a known result the result
* count object and latency will be updated .
* @ param number of permits to release
* @ param result of the execution
* @ param startNanos of the execution */
public void releasePermits ( long number , Result result , long startNanos ) { } } | releasePermits ( number , result , startNanos , clock . nanoTime ( ) ) ; |
public class SpringExceptionHandler { /** * Checks the interface of SQLException and tries to determine / convert JDBC4 exceptions into
* Spring SQL Exceptions
* @ param reason a description of the exception
* @ param SQLState an XOPEN or SQL : 2003 code identifying the exception
* @ param vendorCode a database vendor - specific exception code
* @ param cause original SQL Exception
* @ return SQL Exception converted into Spring SQL Exception . Null otherwise
* @ throws org . midao . jdbc . core . exception . MjdbcException */
private MjdbcSQLException translateJDBC4Exception ( String reason , String SQLState , int vendorCode , SQLException cause ) throws MjdbcException { } } | MjdbcSQLException result = null ; if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLTransientException" ) == true ) { if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLTransactionRollbackException" ) == true ) { result = new ConcurrencyFailureException ( reason , SQLState , vendorCode ) ; } else if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLTransientConnectionException" ) == true ) { result = new TransientDataAccessResourceException ( reason , SQLState , vendorCode ) ; } else if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLTimeoutException" ) == true ) { result = new QueryTimeoutException ( reason , SQLState , vendorCode ) ; } } else if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLNonTransientException" ) == true ) { if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLDataException" ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLFeatureNotSupportedException" ) == true ) { result = new InvalidDataAccessApiUsageException ( reason , SQLState , vendorCode ) ; } else if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLIntegrityConstraintViolationException" ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLInvalidAuthorizationSpecException" ) == true ) { result = new PermissionDeniedDataAccessException ( reason , SQLState , vendorCode ) ; } else if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLNonTransientConnectionException" ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( MappingUtils . 
objectAssignableTo ( cause , "java.sql.SQLSyntaxErrorException" ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } } else if ( MappingUtils . objectAssignableTo ( cause , "java.sql.SQLRecoverableException" ) == true ) { result = new RecoverableDataAccessException ( reason , SQLState , vendorCode ) ; } return result ; |
public class XMemcachedClient { /** * ( non - Javadoc )
* @ see net . rubyeye . xmemcached . MemcachedClient # add ( java . lang . String , int , java . lang . Object ) */
public final boolean add ( final String key , final int exp , final Object value ) throws TimeoutException , InterruptedException , MemcachedException { } } | return this . add ( key , exp , value , this . opTimeout ) ; |
public class DeleteItemRequest { /** * One or more substitution tokens for attribute names in an expression . The following are some use cases for using
* < code > ExpressionAttributeNames < / code > :
* < ul >
* < li >
* To access an attribute whose name conflicts with a DynamoDB reserved word .
* < / li >
* < li >
* To create a placeholder for repeating occurrences of an attribute name in an expression .
* < / li >
* < li >
* To prevent special characters in an attribute name from being misinterpreted in an expression .
* < / li >
* < / ul >
* Use the < b > # < / b > character in an expression to dereference an attribute name . For example , consider the following
* attribute name :
* < ul >
* < li >
* < code > Percentile < / code >
* < / li >
* < / ul >
* The name of this attribute conflicts with a reserved word , so it cannot be used directly in an expression . ( For
* the complete list of reserved words , see < a
* href = " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / ReservedWords . html " > Reserved Words < / a > in
* the < i > Amazon DynamoDB Developer Guide < / i > ) . To work around this , you could specify the following for
* < code > ExpressionAttributeNames < / code > :
* < ul >
* < li >
* < code > { " # P " : " Percentile " } < / code >
* < / li >
* < / ul >
* You could then use this substitution in an expression , as in this example :
* < ul >
* < li >
* < code > # P = : val < / code >
* < / li >
* < / ul >
* < note >
* Tokens that begin with the < b > : < / b > character are < i > expression attribute values < / i > , which are placeholders for
* the actual value at runtime .
* < / note >
* For more information on expression attribute names , see < a href =
* " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / Expressions . AccessingItemAttributes . html "
* > Accessing Item Attributes < / a > in the < i > Amazon DynamoDB Developer Guide < / i > .
* @ param expressionAttributeNames
* One or more substitution tokens for attribute names in an expression . The following are some use cases for
* using < code > ExpressionAttributeNames < / code > : < / p >
* < ul >
* < li >
* To access an attribute whose name conflicts with a DynamoDB reserved word .
* < / li >
* < li >
* To create a placeholder for repeating occurrences of an attribute name in an expression .
* < / li >
* < li >
* To prevent special characters in an attribute name from being misinterpreted in an expression .
* < / li >
* < / ul >
* Use the < b > # < / b > character in an expression to dereference an attribute name . For example , consider the
* following attribute name :
* < ul >
* < li >
* < code > Percentile < / code >
* < / li >
* < / ul >
* The name of this attribute conflicts with a reserved word , so it cannot be used directly in an expression .
* ( For the complete list of reserved words , see < a
* href = " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / ReservedWords . html " > Reserved
* Words < / a > in the < i > Amazon DynamoDB Developer Guide < / i > ) . To work around this , you could specify the
* following for < code > ExpressionAttributeNames < / code > :
* < ul >
* < li >
* < code > { " # P " : " Percentile " } < / code >
* < / li >
* < / ul >
* You could then use this substitution in an expression , as in this example :
* < ul >
* < li >
* < code > # P = : val < / code >
* < / li >
* < / ul >
* < note >
* Tokens that begin with the < b > : < / b > character are < i > expression attribute values < / i > , which are
* placeholders for the actual value at runtime .
* < / note >
* For more information on expression attribute names , see < a href =
* " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / Expressions . AccessingItemAttributes . html "
* > Accessing Item Attributes < / a > in the < i > Amazon DynamoDB Developer Guide < / i > .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DeleteItemRequest withExpressionAttributeNames ( java . util . Map < String , String > expressionAttributeNames ) { } } | setExpressionAttributeNames ( expressionAttributeNames ) ; return this ; |
public class CollectSerIteratorFactory { /** * Creates an iterator wrapper for a meta - property value .
* @ param value the possible collection - like object , not null
* @ param prop the meta - property defining the value , not null
* @ param beanClass the class of the bean , not the meta - property , for better generics , not null
* @ return the iterator , null if not a collection - like type */
@ Override public SerIterator create ( final Object value , final MetaProperty < ? > prop , Class < ? > beanClass ) { } } | Class < ? > declaredType = prop . propertyType ( ) ; if ( value instanceof Grid ) { Class < ? > valueType = defaultToObjectClass ( JodaBeanUtils . collectionType ( prop , beanClass ) ) ; List < Class < ? > > valueTypeTypes = JodaBeanUtils . collectionTypeTypes ( prop , beanClass ) ; return grid ( ( Grid < ? > ) value , declaredType , valueType , valueTypeTypes ) ; } return super . create ( value , prop , beanClass ) ; |
public class ArrayListJsonDeserializer { /** * < p > newInstance < / p >
* @ param deserializer { @ link JsonDeserializer } used to deserialize the objects inside the { @ link ArrayList } .
* @ param < T > Type of the elements inside the { @ link ArrayList }
* @ return a new instance of { @ link ArrayListJsonDeserializer } */
public static < T > ArrayListJsonDeserializer < T > newInstance ( JsonDeserializer < T > deserializer ) { } } | return new ArrayListJsonDeserializer < T > ( deserializer ) ; |
public class XMLConfigWebFactory { /** * creates a File and his content froma a resurce
* @ param resource
* @ param file
* @ throws IOException */
static void createFileFromResourceCheckSizeDiff ( String resource , Resource file ) throws IOException { } } | ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; IOUtil . copy ( InfoImpl . class . getResourceAsStream ( resource ) , baos , true , false ) ; byte [ ] barr = baos . toByteArray ( ) ; if ( file . exists ( ) ) { long trgSize = file . length ( ) ; long srcSize = barr . length ; if ( srcSize == trgSize ) return ; SystemOut . printDate ( SystemUtil . getPrintWriter ( SystemUtil . OUT ) , "update file:" + file ) ; SystemOut . printDate ( SystemUtil . getPrintWriter ( SystemUtil . OUT ) , " - source:" + srcSize ) ; SystemOut . printDate ( SystemUtil . getPrintWriter ( SystemUtil . OUT ) , " - target:" + trgSize ) ; } else file . createNewFile ( ) ; // SystemOut . printDate ( " write file : " + file ) ;
IOUtil . copy ( new ByteArrayInputStream ( barr ) , file , true ) ; |
public class AbstractJoiner { /** * Sends a split brain join request to the target address and returns the response . */
private SplitBrainJoinMessage sendSplitBrainJoinMessage ( Address target , SplitBrainJoinMessage request ) { } } | if ( logger . isFineEnabled ( ) ) { logger . fine ( "Sending SplitBrainJoinMessage to " + target ) ; } Connection conn = node . getEndpointManager ( MEMBER ) . getOrConnect ( target , true ) ; long timeout = SPLIT_BRAIN_CONN_TIMEOUT_MILLIS ; while ( conn == null ) { timeout -= SPLIT_BRAIN_SLEEP_TIME_MILLIS ; if ( timeout < 0 ) { logger . fine ( "Returning null timeout<0, " + timeout ) ; return null ; } try { // noinspection BusyWait
Thread . sleep ( SPLIT_BRAIN_SLEEP_TIME_MILLIS ) ; } catch ( InterruptedException e ) { currentThread ( ) . interrupt ( ) ; return null ; } conn = node . getEndpointManager ( MEMBER ) . getConnection ( target ) ; } NodeEngine nodeEngine = node . nodeEngine ; Future future = nodeEngine . getOperationService ( ) . createInvocationBuilder ( ClusterServiceImpl . SERVICE_NAME , new SplitBrainMergeValidationOp ( request ) , target ) . setTryCount ( 1 ) . invoke ( ) ; try { return ( SplitBrainJoinMessage ) future . get ( SPLIT_BRAIN_JOIN_CHECK_TIMEOUT_SECONDS , TimeUnit . SECONDS ) ; } catch ( TimeoutException e ) { logger . fine ( "Timeout during join check!" , e ) ; } catch ( Exception e ) { logger . warning ( "Error during join check!" , e ) ; } return null ; |
public class ExprParser { /** * staticMember
* : ID _ list ' : : ' ID
* | ID _ list ' : : ' ID ' ( ' exprList ? ' ) ' */
Expr staticMember ( ) { } } | if ( peek ( ) . sym != Sym . ID ) { return sharedMethod ( ) ; } int begin = forward ; while ( move ( ) . sym == Sym . DOT && move ( ) . sym == Sym . ID ) { ; } // ID . ID . ID : :
if ( peek ( ) . sym != Sym . STATIC || tokenList . get ( forward - 1 ) . sym != Sym . ID ) { resetForward ( begin ) ; return sharedMethod ( ) ; } String clazz = getClazz ( begin ) ; match ( Sym . STATIC ) ; String memberName = match ( Sym . ID ) . value ( ) ; // com . jfinal . kit . Str : : isBlank ( str )
if ( peek ( ) . sym == Sym . LPAREN ) { move ( ) ; if ( peek ( ) . sym == Sym . RPAREN ) { move ( ) ; return new StaticMethod ( clazz , memberName , location ) ; } ExprList exprList = exprList ( ) ; match ( Sym . RPAREN ) ; return new StaticMethod ( clazz , memberName , exprList , location ) ; } // com . jfinal . core . Const : : JFINAL _ VERSION
return new StaticField ( clazz , memberName , location ) ; |
public class AsyncFile { /** * Creates new file and writes a sequence of bytes to this file from
* the given buffer , starting at the given file position .
* If file already exists , promise fails with exception .
* @ param path the path of the file to create and write
* @ param buf the buffer from which bytes are to be transferred byteBuffer */
public static Promise < Void > writeNewFile ( Executor executor , Path path , ByteBuf buf ) { } } | return openAsync ( executor , path , set ( WRITE , CREATE_NEW ) ) . then ( file -> file . write ( buf ) . then ( $ -> file . close ( ) ) ) . whenException ( $ -> buf . recycle ( ) ) ; |
public class Packer { /** * Add gridWidth = REMAINDER to the constraints for the current component if
* how = = true 1 it if false . */
public Packer setRemainY ( final boolean how ) { } } | if ( how == true ) { gc . gridheight = GridBagConstraints . REMAINDER ; } else { gc . gridheight = 1 ; } setConstraints ( comp , gc ) ; return this ; |
public class ExampleUtils { /** * Deletes the Google Cloud Pub / Sub topic .
* @ throws IOException if there is a problem deleting the Pub / Sub topic */
private void deletePubsubTopic ( String topic ) throws IOException { } } | if ( pubsubClient == null ) { pubsubClient = newPubsubClient ( options . as ( PubsubOptions . class ) ) . build ( ) ; } if ( executeNullIfNotFound ( pubsubClient . projects ( ) . topics ( ) . get ( topic ) ) != null ) { pubsubClient . projects ( ) . topics ( ) . delete ( topic ) . execute ( ) ; } |
public class RtfByteArrayBuffer { /** * Copies the given array to the internal buffer .
* @ param src */
public void write ( final byte [ ] src ) { } } | if ( src == null ) throw new NullPointerException ( ) ; if ( src . length < buffer . length - pos ) { System . arraycopy ( src , 0 , buffer , pos , src . length ) ; pos += src . length ; size += src . length ; return ; } writeLoop ( src , 0 , src . length ) ; |
public class QueryParametersLazyList { /** * Inserts new row into returned ResultSet
* @ param params values which would be used to fill newly inserted row
* @ throws SQLException */
public void insert ( QueryParameters params ) throws SQLException { } } | getCurrentResultSet ( ) . moveToInsertRow ( ) ; updateResultSetCurrentLine ( getCurrentResultSet ( ) , params ) ; getCurrentResultSet ( ) . insertRow ( ) ; getCurrentResultSet ( ) . moveToCurrentRow ( ) ; |
public class Sign1Message { /** * Create a signature for the message if one does not exist .
* @ param key key to use to sign the message
* @ exception CoseException Errors generated by the COSE module */
public void sign ( OneKey key ) throws CoseException { } } | if ( rgbContent == null ) throw new CoseException ( "No Content Specified" ) ; if ( rgbSignature != null ) return ; if ( rgbProtected == null ) { if ( objProtected . size ( ) > 0 ) rgbProtected = objProtected . EncodeToBytes ( ) ; else rgbProtected = new byte [ 0 ] ; } CBORObject obj = CBORObject . NewArray ( ) ; obj . Add ( contextString ) ; obj . Add ( rgbProtected ) ; obj . Add ( externalData ) ; obj . Add ( rgbContent ) ; rgbSignature = computeSignature ( obj . EncodeToBytes ( ) , key ) ; ProcessCounterSignatures ( ) ; |
public class TaskManagerService { /** * Execute the given task and return the task execution status . If the task is already
* running by this or another task manager , this method blocks until the task has
* finished . ( TODO : should we put a timeout on this ? ) Once the task is not running , it
* then executed , even if we had to wait for a previous execution . In a multi - node
* cluster , the task may be executed by another node if just happened to be scheduled
* for execution , but in most cases , this task manager will execute the task .
* Regardless of whether this or a remote task manager executes the task , this method
* waits for that execution to complete and returns the final task status .
* @ param appDef { @ link ApplicationDefinition } that defines the task ' s context
* including its tenant .
* @ param task Application - specific { @ link Task } to execute .
* @ return Final { @ link TaskStatus } of the task ' s execution . */
public TaskStatus executeTask ( ApplicationDefinition appDef , Task task ) { } } | checkServiceState ( ) ; Tenant tenant = Tenant . getTenant ( appDef ) ; m_logger . debug ( "Checking that task {} in tenant {} is not running" , task . getTaskID ( ) , tenant ) ; TaskRecord taskRecord = null ; synchronized ( m_executeLock ) { taskRecord = waitForTaskStatus ( tenant , task , s -> s != TaskStatus . IN_PROGRESS ) ; taskRecord . setStatus ( TaskStatus . NEVER_EXECUTED ) ; updateTaskStatus ( tenant , taskRecord , false ) ; attemptToExecuteTask ( appDef , task , taskRecord ) ; } m_logger . debug ( "Checking that task {} in tenant {} has completed" , tenant , task . getTaskID ( ) ) ; taskRecord = waitForTaskStatus ( tenant , task , s -> TaskStatus . isCompleted ( s ) ) ; return taskRecord . getStatus ( ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.