signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Kafka { /** * Configures to start reading partitions from specific offsets , set independently for each partition . * Resets previously set offsets . * @ param specificOffsets the specified offsets for partitions * @ see FlinkKafkaConsumerBase # setStartFromSpecificOffsets ( Map ) */ public Kafka startFromSpecificOffsets ( Map < Integer , Long > specificOffsets ) { } }
this . startupMode = StartupMode . SPECIFIC_OFFSETS ; this . specificOffsets = Preconditions . checkNotNull ( specificOffsets ) ; return this ;
public class InternalUtils { /** * Get the Struts ModuleConfig for the given module path . */ public static ModuleConfig getModuleConfig ( String modulePath , ServletContext context ) { } }
return ( ModuleConfig ) context . getAttribute ( Globals . MODULE_KEY + modulePath ) ;
public class WorkItem { /** * Add secondary types patterns ( not nested in the type itself but contained * in the java file ) * @ param fileName * java file name ( not path ! ) without . java suffix * @ param classNamePattern * non null pattern for all matching . class file names * @ return modified classNamePattern , if there are more then one type * defined in the java file */ private static String addSecondaryTypesToPattern ( IFile file , String fileName , String classNamePattern ) { } }
ICompilationUnit cu = JavaCore . createCompilationUnitFrom ( file ) ; if ( cu == null ) { FindbugsPlugin . getDefault ( ) . logError ( "NULL compilation unit for " + file + ", FB analysis might be incomplete for included types" ) ; return classNamePattern ; } try { IType [ ] types = cu . getTypes ( ) ; if ( types . length > 1 ) { StringBuilder sb = new StringBuilder ( classNamePattern ) ; for ( IType type : types ) { if ( fileName . equals ( type . getElementName ( ) ) ) { // " usual " type with the same name : we have it already continue ; } sb . append ( "|" ) . append ( type . getElementName ( ) ) ; sb . append ( "\\.class|" ) . append ( type . getElementName ( ) ) ; sb . append ( "\\$.*\\.class" ) ; } classNamePattern = sb . toString ( ) ; } } catch ( JavaModelException e ) { FindbugsPlugin . getDefault ( ) . logException ( e , "Cannot get types from compilation unit: " + cu ) ; } return classNamePattern ;
public class TypeConformanceComputer { /** * Keeps the cumulated distance for all the common raw super types of the given references . * Interfaces that are more directly implemented will get a lower total count than more general * interfaces . */ protected void cumulateDistance ( final List < LightweightTypeReference > references , Multimap < JvmType , LightweightTypeReference > all , Multiset < JvmType > cumulatedDistance ) { } }
for ( LightweightTypeReference other : references ) { Multiset < JvmType > otherDistance = LinkedHashMultiset . create ( ) ; initializeDistance ( other , all , otherDistance ) ; cumulatedDistance . retainAll ( otherDistance ) ; for ( Multiset . Entry < JvmType > typeToDistance : otherDistance . entrySet ( ) ) { if ( cumulatedDistance . contains ( typeToDistance . getElement ( ) ) ) cumulatedDistance . add ( typeToDistance . getElement ( ) , typeToDistance . getCount ( ) ) ; } }
public class ComputeNodesImpl { /** * Restarts the specified compute node . * You can restart a node only if it is in an idle or running state . * @ param poolId The ID of the pool that contains the compute node . * @ param nodeId The ID of the compute node that you want to restart . * @ param nodeRebootOption When to reboot the compute node and what to do with currently running tasks . The default value is requeue . Possible values include : ' requeue ' , ' terminate ' , ' taskCompletion ' , ' retainedData ' * @ param computeNodeRebootOptions Additional parameters for the operation * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Void > rebootAsync ( String poolId , String nodeId , ComputeNodeRebootOption nodeRebootOption , ComputeNodeRebootOptions computeNodeRebootOptions , final ServiceCallback < Void > serviceCallback ) { } }
return ServiceFuture . fromHeaderResponse ( rebootWithServiceResponseAsync ( poolId , nodeId , nodeRebootOption , computeNodeRebootOptions ) , serviceCallback ) ;
public class CPRuleUtil { /** * Returns a range of all the cp rules where groupId = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPRuleModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param groupId the group ID * @ param start the lower bound of the range of cp rules * @ param end the upper bound of the range of cp rules ( not inclusive ) * @ return the range of matching cp rules */ public static List < CPRule > findByGroupId ( long groupId , int start , int end ) { } }
return getPersistence ( ) . findByGroupId ( groupId , start , end ) ;
public class IntTupleIterators { /** * Returns an iterator that returns { @ link MutableIntTuple } s in the * given range . < br > * < br > * Copies of the given tuples will be stored internally . < br > * < br > * Also see < a href = " . . / . . / package - summary . html # IterationOrder " > * Iteration Order < / a > * @ param min The minimum values , inclusive * @ param max The maximum values , exclusive * @ return The iterator * @ throws IllegalArgumentException If the given tuples do not * have the same { @ link Tuple # getSize ( ) size } */ public static Iterator < MutableIntTuple > lexicographicalIterator ( IntTuple min , IntTuple max ) { } }
Utils . checkForEqualSize ( min , max ) ; IntTuple localMin = IntTuples . copy ( min ) ; IntTuple localMax = IntTuples . copy ( max ) ; return new IntTupleIterator ( localMin , localMax , IntTupleIncrementors . lexicographicalIncrementor ( ) ) ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcCurrencyEnumToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class LetterboxingViewport { /** * Forces update of current world size according to window size . Will try to keep the set aspect ratio . * @ param screenWidth current screen width . * @ param screenHeight current screen height . */ private void updateWorldSize ( final int screenWidth , final int screenHeight ) { } }
final float width = screenWidth * scaleX ; final float height = screenHeight * scaleY ; final float fitHeight = width / aspectRatio ; if ( fitHeight > height ) { setWorldSize ( height * aspectRatio , height ) ; } else { setWorldSize ( width , fitHeight ) ; }
public class HelpModule { /** * Encodes the specified URL path , which may be absolute or relative . * For compressed help sets , use the web / prefix . When using this syntax , CWF requires that the * root folder in the jar file be named " web " with the specified path being relative to that * folder . * @ param url URL to encode . * @ return The encoded URL . */ private String encodeURL ( String url ) { } }
return url . startsWith ( "/" ) || url . startsWith ( "." ) ? url : url . startsWith ( "web/" ) ? "/" + url : "/web/" + url ;
public class Wings { /** * Unsubscribes to link state changes of the endpoints . * @ param object the { @ link java . lang . Object } you wish to unsubscribe . * @ throws IllegalStateException Wings must be initialized . See { @ link Wings # init ( IWingsModule , Class [ ] ) } . */ public static void unsubscribe ( Object object ) throws IllegalStateException { } }
if ( ! sIsInitialized ) { throw new IllegalStateException ( "Wings must be initialized. See Wings#init()." ) ; } WingsInjector . getBus ( ) . unregister ( object ) ;
public class SemanticSearchServiceImpl {

    /**
     * Computes the synonym of the ontology term closest to the given search terms,
     * by maximizing the {@link StringDistance} over all synonyms (0 means disjoint,
     * 1 means identical).
     *
     * @param ontologyTerm the {@link OntologyTerm}
     * @param searchTerms  the search terms
     * @return the synonym with the maximum distance to the search terms
     */
    Hit<String> bestMatchingSynonym(OntologyTerm ontologyTerm, Set<String> searchTerms) {
        // getSynonyms() always contains the term itself, so the stream is non-empty.
        return ontologyTerm.getSynonyms().stream()
                .map(synonym -> Hit.create(synonym, distanceFrom(synonym, searchTerms)))
                .max(Comparator.naturalOrder())
                .orElseThrow(() -> new IllegalStateException(
                        "ontologyTerm.getSynonyms() shouldn't return an empty list"));
    }
}
public class ApacheUtils { /** * Returns a new Credentials Provider for use with proxy authentication . */ public static CredentialsProvider newProxyCredentialsProvider ( HttpClientSettings settings ) { } }
final CredentialsProvider provider = new BasicCredentialsProvider ( ) ; provider . setCredentials ( newAuthScope ( settings ) , newNTCredentials ( settings ) ) ; return provider ;
public class SlideShowView { /** * Set the playlist * @ param playlist The playlist to set */ public void setPlaylist ( PlayList playlist ) { } }
this . playlist = playlist ; if ( adapter != null ) { playlist . onSlideCountChanged ( adapter . getCount ( ) ) ; }
public class Types { /** * Return the least upper bound ( lub ) of set of types . If the lub * does not exist return the type of null ( bottom ) . */ public Type lub ( Type ... ts ) { } }
final int UNKNOWN_BOUND = 0 ; final int ARRAY_BOUND = 1 ; final int CLASS_BOUND = 2 ; int [ ] kinds = new int [ ts . length ] ; int boundkind = UNKNOWN_BOUND ; for ( int i = 0 ; i < ts . length ; i ++ ) { Type t = ts [ i ] ; switch ( t . getTag ( ) ) { case CLASS : boundkind |= kinds [ i ] = CLASS_BOUND ; break ; case ARRAY : boundkind |= kinds [ i ] = ARRAY_BOUND ; break ; case TYPEVAR : do { t = t . getUpperBound ( ) ; } while ( t . hasTag ( TYPEVAR ) ) ; if ( t . hasTag ( ARRAY ) ) { boundkind |= kinds [ i ] = ARRAY_BOUND ; } else { boundkind |= kinds [ i ] = CLASS_BOUND ; } break ; default : kinds [ i ] = UNKNOWN_BOUND ; if ( t . isPrimitive ( ) ) return syms . errType ; } } switch ( boundkind ) { case 0 : return syms . botType ; case ARRAY_BOUND : // calculate lub ( A [ ] , B [ ] ) Type [ ] elements = new Type [ ts . length ] ; for ( int i = 0 ; i < ts . length ; i ++ ) { Type elem = elements [ i ] = elemTypeFun . apply ( ts [ i ] ) ; if ( elem . isPrimitive ( ) ) { // if a primitive type is found , then return // arraySuperType unless all the types are the // same Type first = ts [ 0 ] ; for ( int j = 1 ; j < ts . length ; j ++ ) { if ( ! isSameType ( first , ts [ j ] ) ) { // lub ( int [ ] , B [ ] ) is Cloneable & Serializable return arraySuperType ( ) ; } } // all the array types are the same , return one // lub ( int [ ] , int [ ] ) is int [ ] return first ; } } // lub ( A [ ] , B [ ] ) is lub ( A , B ) [ ] return new ArrayType ( lub ( elements ) , syms . arrayClass ) ; case CLASS_BOUND : // calculate lub ( A , B ) int startIdx = 0 ; for ( int i = 0 ; i < ts . length ; i ++ ) { Type t = ts [ i ] ; if ( t . hasTag ( CLASS ) || t . hasTag ( TYPEVAR ) ) { break ; } else { startIdx ++ ; } } Assert . check ( startIdx < ts . length ) ; // step 1 - compute erased candidate set ( EC ) List < Type > cl = erasedSupertypes ( ts [ startIdx ] ) ; for ( int i = startIdx + 1 ; i < ts . length ; i ++ ) { Type t = ts [ i ] ; if ( t . hasTag ( CLASS ) || t . 
hasTag ( TYPEVAR ) ) cl = intersect ( cl , erasedSupertypes ( t ) ) ; } // step 2 - compute minimal erased candidate set ( MEC ) List < Type > mec = closureMin ( cl ) ; // step 3 - for each element G in MEC , compute lci ( Inv ( G ) ) List < Type > candidates = List . nil ( ) ; for ( Type erasedSupertype : mec ) { List < Type > lci = List . of ( asSuper ( ts [ startIdx ] , erasedSupertype . tsym ) ) ; for ( int i = startIdx + 1 ; i < ts . length ; i ++ ) { Type superType = asSuper ( ts [ i ] , erasedSupertype . tsym ) ; lci = intersect ( lci , superType != null ? List . of ( superType ) : List . < Type > nil ( ) ) ; } candidates = candidates . appendList ( lci ) ; } // step 4 - let MEC be { G1 , G2 . . . Gn } , then we have that // lub = lci ( Inv ( G1 ) ) & lci ( Inv ( G2 ) ) & . . . & lci ( Inv ( Gn ) ) return compoundMin ( candidates ) ; default : // calculate lub ( A , B [ ] ) List < Type > classes = List . of ( arraySuperType ( ) ) ; for ( int i = 0 ; i < ts . length ; i ++ ) { if ( kinds [ i ] != ARRAY_BOUND ) // Filter out any arrays classes = classes . prepend ( ts [ i ] ) ; } // lub ( A , B [ ] ) is lub ( A , arraySuperType ) return lub ( classes ) ; }
public class PartialUniqueIndex { /** * Removes and returns the entry associated with the specified key * in the HashMap . Returns null if the HashMap contains no mapping * for this key . */ public Object remove ( Object key ) { } }
Object underlying = this . underlyingObjectGetter . getUnderlyingObject ( key ) ; return removeUsingUnderlying ( underlying ) ;
public class ExcelFunctions { /** * Returns the number of days between two dates . */ public static int days ( EvaluationContext ctx , Object endDate , Object startDate ) { } }
return datedif ( ctx , startDate , endDate , "d" ) ;
public class SecurityActions { /** * Create a new instance by finding a constructor that matches the argumentTypes signature using the arguments for * instantiation . * @ param className * Full classname of class to create * @ param argumentTypes * The constructor argument types * @ param arguments * The constructor arguments * @ param cl * The ClassLoader to use in constructing the new instance * @ return a new instance * @ throws IllegalArgumentException * if className , argumentTypes , ClassLoader , or arguments are null * @ throws RuntimeException * if any exceptions during creation * @ author < a href = " mailto : aslak @ conduct . no " > Aslak Knutsen < / a > * @ author < a href = " mailto : andrew . rubinger @ jboss . org " > ALR < / a > */ static < T > T newInstance ( final String className , final Class < ? > [ ] argumentTypes , final Object [ ] arguments , final Class < T > expectedType , final ClassLoader cl ) { } }
// Precondition checks if ( className == null ) { throw new IllegalArgumentException ( "ClassName must be specified" ) ; } if ( argumentTypes == null ) { throw new IllegalArgumentException ( "ArgumentTypes must be specified. Use empty array if no arguments" ) ; } if ( arguments == null ) { throw new IllegalArgumentException ( "Arguments must be specified. Use empty array if no arguments" ) ; } if ( expectedType == null ) { throw new IllegalArgumentException ( "Expected type must be specified" ) ; } if ( cl == null ) { throw new IllegalArgumentException ( "CL must be specified" ) ; } final Class < ? > implClass ; try { implClass = Class . forName ( className , false , cl ) ; } catch ( ClassNotFoundException cnfe ) { throw new IllegalArgumentException ( "Could not find class named " + className + " in the specified CL: " + cl , cnfe ) ; } // Delegate return newInstance ( implClass , argumentTypes , arguments , expectedType ) ;
public class InProcessLauncher { /** * Starts a Spark application . * @ see AbstractLauncher # startApplication ( SparkAppHandle . Listener . . . ) * @ param listeners Listeners to add to the handle before the app is launched . * @ return A handle for the launched application . */ @ Override public SparkAppHandle startApplication ( SparkAppHandle . Listener ... listeners ) throws IOException { } }
if ( builder . isClientMode ( builder . getEffectiveConfig ( ) ) ) { LOG . warning ( "It's not recommended to run client-mode applications using InProcessLauncher." ) ; } Method main = findSparkSubmit ( ) ; LauncherServer server = LauncherServer . getOrCreateServer ( ) ; InProcessAppHandle handle = new InProcessAppHandle ( server ) ; for ( SparkAppHandle . Listener l : listeners ) { handle . addListener ( l ) ; } String secret = server . registerHandle ( handle ) ; setConf ( LauncherProtocol . CONF_LAUNCHER_PORT , String . valueOf ( server . getPort ( ) ) ) ; setConf ( LauncherProtocol . CONF_LAUNCHER_SECRET , secret ) ; List < String > sparkArgs = builder . buildSparkSubmitArgs ( ) ; String [ ] argv = sparkArgs . toArray ( new String [ sparkArgs . size ( ) ] ) ; String appName = CommandBuilderUtils . firstNonEmpty ( builder . appName , builder . mainClass , "<unknown>" ) ; handle . start ( appName , main , argv ) ; return handle ;
public class PageExceptionImpl { /** * set a additional key value * @ param key * @ param value */ public void setAdditional ( Collection . Key key , Object value ) { } }
additional . setEL ( key , StringUtil . toStringEmptyIfNull ( value ) ) ;
public class Output { /** * { @ inheritDoc } */ @ Override public void writeString ( String string ) { } }
final byte [ ] encoded = encodeString ( string ) ; final int len = encoded . length ; if ( len < AMF . LONG_STRING_LENGTH ) { buf . put ( AMF . TYPE_STRING ) ; // write unsigned short buf . put ( ( byte ) ( ( len >> 8 ) & 0xff ) ) ; buf . put ( ( byte ) ( len & 0xff ) ) ; } else { buf . put ( AMF . TYPE_LONG_STRING ) ; buf . putInt ( len ) ; } buf . put ( encoded ) ;
public class StringUtils { /** * < p > Searches a CharSequence to find the first index of any * character not in the given set of characters . < / p > * < p > A { @ code null } CharSequence will return { @ code - 1 } . * A { @ code null } or zero length search array will return { @ code - 1 } . < / p > * < pre > * StringUtils . indexOfAnyBut ( null , * ) = - 1 * StringUtils . indexOfAnyBut ( " " , * ) = - 1 * StringUtils . indexOfAnyBut ( * , null ) = - 1 * StringUtils . indexOfAnyBut ( * , [ ] ) = - 1 * StringUtils . indexOfAnyBut ( " zzabyycdxx " , new char [ ] { ' z ' , ' a ' } ) = 3 * StringUtils . indexOfAnyBut ( " aba " , new char [ ] { ' z ' } ) = 0 * StringUtils . indexOfAnyBut ( " aba " , new char [ ] { ' a ' , ' b ' } ) = - 1 * < / pre > * @ param cs the CharSequence to check , may be null * @ param searchChars the chars to search for , may be null * @ return the index of any of the chars , - 1 if no match or null input * @ since 2.0 * @ since 3.0 Changed signature from indexOfAnyBut ( String , char [ ] ) to indexOfAnyBut ( CharSequence , char . . . ) */ public static int indexOfAnyBut ( final CharSequence cs , final char ... searchChars ) { } }
if ( isEmpty ( cs ) || ArrayUtils . isEmpty ( searchChars ) ) { return INDEX_NOT_FOUND ; } final int csLen = cs . length ( ) ; final int csLast = csLen - 1 ; final int searchLen = searchChars . length ; final int searchLast = searchLen - 1 ; outer : for ( int i = 0 ; i < csLen ; i ++ ) { final char ch = cs . charAt ( i ) ; for ( int j = 0 ; j < searchLen ; j ++ ) { if ( searchChars [ j ] == ch ) { if ( i < csLast && j < searchLast && Character . isHighSurrogate ( ch ) ) { if ( searchChars [ j + 1 ] == cs . charAt ( i + 1 ) ) { continue outer ; } } else { continue outer ; } } } return i ; } return INDEX_NOT_FOUND ;
public class ZapNTLMEngineImpl { /** * Calculate a challenge block */ private static byte [ ] makeRandomChallenge ( final Random random ) throws AuthenticationException { } }
final byte [ ] rval = new byte [ 8 ] ; synchronized ( random ) { random . nextBytes ( rval ) ; } return rval ;
public class AbstractLexicalAnalyzer { /** * 分词 * @ param sentence 文本 * @ param normalized 正规化后的文本 * @ param wordList 储存单词列表 * @ param attributeList 储存用户词典中的词性 , 设为null表示不查询用户词典 */ protected void segment ( final String sentence , final String normalized , final List < String > wordList , final List < CoreDictionary . Attribute > attributeList ) { } }
if ( attributeList != null ) { final int [ ] offset = new int [ ] { 0 } ; CustomDictionary . parseLongestText ( sentence , new AhoCorasickDoubleArrayTrie . IHit < CoreDictionary . Attribute > ( ) { @ Override public void hit ( int begin , int end , CoreDictionary . Attribute value ) { if ( begin != offset [ 0 ] ) { segmentAfterRule ( sentence . substring ( offset [ 0 ] , begin ) , normalized . substring ( offset [ 0 ] , begin ) , wordList ) ; } while ( attributeList . size ( ) < wordList . size ( ) ) attributeList . add ( null ) ; wordList . add ( sentence . substring ( begin , end ) ) ; attributeList . add ( value ) ; assert wordList . size ( ) == attributeList . size ( ) : "词语列表与属性列表不等长" ; offset [ 0 ] = end ; } } ) ; if ( offset [ 0 ] != sentence . length ( ) ) { segmentAfterRule ( sentence . substring ( offset [ 0 ] ) , normalized . substring ( offset [ 0 ] ) , wordList ) ; } } else { segmentAfterRule ( sentence , normalized , wordList ) ; }
public class PhaseThreeApplication { /** * Stage five parameter equivalencing . * @ param network the { @ link ProtoNetwork network } to equivalence * @ param equivalences the { @ link Set set } of { @ link EquivalenceDataIndex } * @ param bldr the { @ link StringBuilder } * @ return the { @ code int } count of parameter equivalences */ private int stage5Parameter ( final ProtoNetwork network , Set < EquivalenceDataIndex > equivalences , final StringBuilder bldr ) { } }
bldr . append ( "Equivalencing parameters" ) ; stageOutput ( bldr . toString ( ) ) ; ProtoNetwork ret = network ; int ct = 0 ; try { ct = p2 . stage3EquivalenceParameters ( ret , equivalences ) ; stageOutput ( "(" + ct + " equivalences)" ) ; } catch ( IOException ioex ) { final String err = ioex . getMessage ( ) ; fatal ( err ) ; } return ct ;
public class IoServiceListenerSupport { /** * Calls { @ link IoServiceListener # sessionCreated ( IoSession ) } for all registered listeners . */ public void fireSessionCreated ( IoSession session ) { } }
boolean firstSession = false ; if ( session . getService ( ) instanceof IoConnector ) { synchronized ( managedSessions ) { firstSession = managedSessions . isEmpty ( ) ; } } // If already registered , ignore . if ( managedSessions . putIfAbsent ( session . getId ( ) , session ) != null ) { return ; } // If the first connector session , fire a virtual service activation event . if ( firstSession ) { fireServiceActivated ( ) ; } // Fire session events . IoFilterChain filterChain = session . getFilterChain ( ) ; filterChain . fireSessionCreated ( ) ; filterChain . fireSessionOpened ( ) ; int managedSessionCount = managedSessions . size ( ) ; if ( managedSessionCount > largestManagedSessionCount ) { largestManagedSessionCount = managedSessionCount ; } cumulativeManagedSessionCount ++ ; // Fire listener events . for ( IoServiceListener l : listeners ) { try { l . sessionCreated ( session ) ; } catch ( Throwable e ) { ExceptionMonitor . getInstance ( ) . exceptionCaught ( e ) ; } }
public class Interval { /** * Reads a whole number from the given field of the node . Accepts numbers , * numerical strings and fractions . * @ param node node from which to read * @ param field name of the field to read * @ return the field ' s int value * @ throws NumberFormatException if the content cannot be parsed to a number */ static int readInt ( JsonNode node , String field ) throws NumberFormatException { } }
String stringValue = node . get ( field ) . asText ( ) ; return ( int ) Float . parseFloat ( stringValue ) ;
public class FullyQualifiedNameImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case AfplibPackage . FULLY_QUALIFIED_NAME__FQN_TYPE : return FQN_TYPE_EDEFAULT == null ? fqnType != null : ! FQN_TYPE_EDEFAULT . equals ( fqnType ) ; case AfplibPackage . FULLY_QUALIFIED_NAME__FQN_FORMAT : return FQN_FORMAT_EDEFAULT == null ? fqnFormat != null : ! FQN_FORMAT_EDEFAULT . equals ( fqnFormat ) ; case AfplibPackage . FULLY_QUALIFIED_NAME__FQ_NAME : return FQ_NAME_EDEFAULT == null ? fqName != null : ! FQ_NAME_EDEFAULT . equals ( fqName ) ; } return super . eIsSet ( featureID ) ;
public class ReflectKit { /** * Load a class according to the class name . * @ param typeName * @ return */ public static Class < ? > form ( String typeName ) { } }
try { return Class . forName ( typeName ) ; } catch ( Exception e ) { log . warn ( "Class.forName fail" , e . getMessage ( ) ) ; return null ; }
public class GeometryExpression { /** * Returns a geometric object that represents the Point * set difference of this geometric object with anotherGeometry . * @ param geometry other geometry * @ return difference between this and the other geometry */ public GeometryExpression < Geometry > difference ( Expression < ? extends Geometry > geometry ) { } }
return GeometryExpressions . geometryOperation ( SpatialOps . DIFFERENCE , mixin , geometry ) ;
public class FNPRGImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case AfplibPackage . FNPRG__RESERVED : setReserved ( RESERVED_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__LC_HEIGHT : setLcHeight ( LC_HEIGHT_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__CAP_MHT : setCapMHt ( CAP_MHT_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__MAX_ASC_HT : setMaxAscHt ( MAX_ASC_HT_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__MAX_DES_DP : setMaxDesDp ( MAX_DES_DP_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__RESERVED2 : setReserved2 ( RESERVED2_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__RETIRED : setRetired ( RETIRED_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__RESERVED3 : setReserved3 ( RESERVED3_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__USCORE_WD : setUscoreWd ( USCORE_WD_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__USCORE_WDF : setUscoreWdf ( USCORE_WDF_EDEFAULT ) ; return ; case AfplibPackage . FNPRG__USCORE_POS : setUscorePos ( USCORE_POS_EDEFAULT ) ; return ; } super . eUnset ( featureID ) ;
public class Dim { /** * Returns whether the given string is syntactically valid script . */ public boolean stringIsCompilableUnit ( String str ) { } }
DimIProxy action = new DimIProxy ( this , IPROXY_STRING_IS_COMPILABLE ) ; action . text = str ; action . withContext ( ) ; return action . booleanResult ;
public class SchemaService { /** * Delete the given application , including all of its data , from the default tenant . * If the given application doesn ' t exist , the call is a no - op . WARNING : This method * deletes an application regardless of whether it has a key defined . * @ param appName Name of application to delete in default tenant . */ public void deleteApplication ( String appName ) { } }
checkServiceState ( ) ; ApplicationDefinition appDef = getApplication ( appName ) ; if ( appDef == null ) { return ; } deleteApplication ( appName , appDef . getKey ( ) ) ;
public class Quaternionf { /** * Add < code > q2 < / code > to this quaternion . * @ param q2 * the quaternion to add to this * @ return this */ public Quaternionf add ( Quaternionfc q2 ) { } }
x += q2 . x ( ) ; y += q2 . y ( ) ; z += q2 . z ( ) ; w += q2 . w ( ) ; return this ;
public class V1InstanceCreator { /** * Creates a new Regression Suite with title assigned with this Regression Plan . * @ param name Title of the suite . * @ param regressionPlan Regression Plan to assign . * @ return A newly minted Regression Suite that exists in the VersionOne system . */ public RegressionSuite regressionSuite ( String name , RegressionPlan regressionPlan ) { } }
return regressionSuite ( name , regressionPlan , null ) ;
public class LineSearchMore94 { /** * Configures the line search . * @ param ftol Tolerance for sufficient decrease . ftol { @ code > } 0 . Smaller value for loose tolerance . Try 1e - 4 * @ param gtol Tolerance for curvature condition . gtol & ge ; 0 . Larger value for loose tolerance . Try 0.9 * @ param xtol Relative tolerance for acceptable step . xtol & ge ; 0 . Larger value for loose tolerance . Try 1e - 4. * @ return Reference to this class to allow for command chaining . */ public LineSearchMore94 setConvergence ( double ftol , double gtol , double xtol ) { } }
if ( ftol < 0 ) throw new IllegalArgumentException ( "ftol must be >= 0 " ) ; if ( gtol < 0 ) throw new IllegalArgumentException ( "gtol must be >= 0 " ) ; if ( xtol < 0 ) throw new IllegalArgumentException ( "xtol must be >= 0 " ) ; this . ftol = ftol ; this . gtol = gtol ; this . xtol = xtol ; return this ;
public class JsDocInfoParser { /** * Parse a { @ code @ suppress } tag of the form * { @ code @ suppress & # 123 ; warning1 | warning2 & # 125 ; } . * @ param token The current token . */ private JsDocToken parseSuppressTag ( JsDocToken token ) { } }
if ( token != JsDocToken . LEFT_CURLY ) { addParserWarning ( "msg.jsdoc.suppress" ) ; return token ; } else { Set < String > suppressions = new HashSet < > ( ) ; while ( true ) { if ( match ( JsDocToken . STRING ) ) { String name = stream . getString ( ) ; if ( ! suppressionNames . contains ( name ) ) { addParserWarning ( "msg.jsdoc.suppress.unknown" , name ) ; } suppressions . add ( stream . getString ( ) ) ; token = next ( ) ; } else { addParserWarning ( "msg.jsdoc.suppress" ) ; return token ; } if ( match ( JsDocToken . PIPE , JsDocToken . COMMA ) ) { token = next ( ) ; } else { break ; } } if ( ! match ( JsDocToken . RIGHT_CURLY ) ) { addParserWarning ( "msg.jsdoc.suppress" ) ; } else { token = next ( ) ; jsdocBuilder . recordSuppressions ( suppressions ) ; } return eatUntilEOLIfNotAnnotation ( ) ; }
public class FSDataset { /** * Find the file corresponding to the block and return it if it exists . */ File validateBlockFile ( int namespaceId , Block b ) throws IOException { } }
return getValidateBlockFile ( namespaceId , b , false ) ;
public class AbstractUrl { /** * { @ inheritDoc } */ public boolean isNextIn ( Context context ) { } }
Source source = context . getSource ( ) ; return parseLength ( source , source . getOffset ( ) , context . getTerminator ( ) ) >= 0 ;
public class GraphEntityMapper { /** * Converts a property stored in Neo4J ( nodes or relationship ) to * corresponding entity field value */ public Object fromNeo4JObject ( Object source , Field field ) { } }
Class < ? > targetClass = field . getType ( ) ; if ( targetClass . isAssignableFrom ( BigDecimal . class ) || targetClass . isAssignableFrom ( BigInteger . class ) ) { return PropertyAccessorHelper . fromSourceToTargetClass ( field . getType ( ) , source . getClass ( ) , source ) ; } else if ( targetClass . isAssignableFrom ( Calendar . class ) || targetClass . isAssignableFrom ( GregorianCalendar . class ) ) { Date d = ( Date ) PropertyAccessorHelper . fromSourceToTargetClass ( Date . class , source . getClass ( ) , source ) ; Calendar cal = Calendar . getInstance ( ) ; cal . setTime ( d ) ; return cal ; } else if ( targetClass . isAssignableFrom ( Date . class ) ) { return PropertyAccessorHelper . fromSourceToTargetClass ( field . getType ( ) , source . getClass ( ) , source ) ; } else { return source ; }
public class RevocationChecker { /** * We have a cert whose revocation status couldn ' t be verified by * a CRL issued by the cert that issued the CRL . See if we can * find a valid CRL issued by a separate key that can verify the * revocation status of this certificate . * Note that this does not provide support for indirect CRLs , * only CRLs signed with a different key ( but the same issuer * name ) as the certificate being checked . * @ param currCert the < code > X509Certificate < / code > to be checked * @ param prevKey the < code > PublicKey < / code > that failed * @ param signFlag < code > true < / code > if that key was trusted to sign CRLs * @ param stackedCerts a < code > Set < / code > of < code > X509Certificate < / code > s > * whose revocation status depends on the * non - revoked status of this cert . To avoid * circular dependencies , we assume they ' re * revoked while checking the revocation * status of this cert . * @ throws CertPathValidatorException if the cert ' s revocation status * cannot be verified successfully with another key */ private void verifyWithSeparateSigningKey ( X509Certificate cert , PublicKey prevKey , boolean signFlag , Set < X509Certificate > stackedCerts ) throws CertPathValidatorException { } }
String msg = "revocation status" ; if ( debug != null ) { debug . println ( "RevocationChecker.verifyWithSeparateSigningKey()" + " ---checking " + msg + "..." ) ; } // reject circular dependencies - RFC 3280 is not explicit on how // to handle this , so we feel it is safest to reject them until // the issue is resolved in the PKIX WG . if ( ( stackedCerts != null ) && stackedCerts . contains ( cert ) ) { if ( debug != null ) { debug . println ( "RevocationChecker.verifyWithSeparateSigningKey()" + " circular dependency" ) ; } throw new CertPathValidatorException ( "Could not determine revocation status" , null , null , - 1 , BasicReason . UNDETERMINED_REVOCATION_STATUS ) ; } // Try to find another key that might be able to sign // CRLs vouching for this cert . // If prevKey wasn ' t trusted , maybe we just didn ' t have the right // path to it . Don ' t rule that key out . if ( ! signFlag ) { buildToNewKey ( cert , null , stackedCerts ) ; } else { buildToNewKey ( cert , prevKey , stackedCerts ) ; }
public class AmazonEC2Client { /** * Creates a new Capacity Reservation with the specified attributes . * Capacity Reservations enable you to reserve capacity for your Amazon EC2 instances in a specific Availability * Zone for any duration . This gives you the flexibility to selectively add capacity reservations and still get the * Regional RI discounts for that usage . By creating Capacity Reservations , you ensure that you always have access * to Amazon EC2 capacity when you need it , for as long as you need it . For more information , see < a * href = " https : / / docs . aws . amazon . com / AWSEC2 / latest / UserGuide / ec2 - capacity - reservations . html " > Capacity * Reservations < / a > in the < i > Amazon Elastic Compute Cloud User Guide < / i > . * Your request to create a Capacity Reservation could fail if Amazon EC2 does not have sufficient capacity to * fulfill the request . If your request fails due to Amazon EC2 capacity constraints , either try again at a later * time , try in a different Availability Zone , or request a smaller capacity reservation . If your application is * flexible across instance types and sizes , try to create a Capacity Reservation with different instance * attributes . * Your request could also fail if the requested quantity exceeds your On - Demand Instance limit for the selected * instance type . If your request fails due to limit constraints , increase your On - Demand Instance limit for the * required instance type and try again . For more information about increasing your instance limits , see < a * href = " https : / / docs . aws . amazon . com / AWSEC2 / latest / UserGuide / ec2 - resource - limits . html " > Amazon EC2 Service Limits < / a > * in the < i > Amazon Elastic Compute Cloud User Guide < / i > . * @ param createCapacityReservationRequest * @ return Result of the CreateCapacityReservation operation returned by the service . * @ sample AmazonEC2 . 
CreateCapacityReservation * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / CreateCapacityReservation " target = " _ top " > AWS * API Documentation < / a > */ @ Override public CreateCapacityReservationResult createCapacityReservation ( CreateCapacityReservationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateCapacityReservation ( request ) ;
public class Overlay { /** * Loads all bytes of the overlay into an array and returns them . * @ return array containing the overlay bytes * @ throws IOException */ public byte [ ] getDump ( ) throws IOException { } }
byte [ ] dump = new byte [ ( int ) getSize ( ) ] ; try ( RandomAccessFile raf = new RandomAccessFile ( file , "r" ) ) { raf . seek ( offset ) ; raf . readFully ( dump ) ; } return dump ;
public class OriginAliasMessage { /** * Returns the length of this message when encoded according to the Client - World Model protocol . * @ return the length , in bytes , of the encoded form of this message . */ public int getMessageLength ( ) { } }
// Message type int messageLength = 1 ; // Number of aliases messageLength += 4 ; if ( this . aliases != null ) { for ( OriginAlias alias : this . aliases ) { // Alias number , name length messageLength += 8 ; try { messageLength += alias . origin . getBytes ( "UTF-16BE" ) . length ; } catch ( UnsupportedEncodingException e ) { log . error ( "Unable to encode strings into UTF-16." ) ; e . printStackTrace ( ) ; } } } return messageLength ;
public class MuzeiArtSource { /** * This method is invoked on the worker thread with a request to process . * Only one Intent is processed at a time , but the processing happens on a * worker thread that runs independently from other application logic . * So , if this code takes a long time , it will hold up other requests to * the same IntentService , but it will not hold up anything else . * When all requests have been handled , the IntentService stops itself , * so you should not call { @ link # stopSelf } . * @ param intent The value passed to { @ link * android . content . Context # startService ( Intent ) } . * This may be null if the service is being restarted after * its process has gone away ; see * { @ link android . app . Service # onStartCommand } * for details . * @ see IntentService # onHandleIntent ( Intent ) */ @ CallSuper protected void onHandleIntent ( Intent intent ) { } }
if ( intent == null ) { return ; } String action = intent . getAction ( ) ; // TODO : permissions ? if ( ACTION_SUBSCRIBE . equals ( action ) ) { processSubscribe ( ( ComponentName ) intent . getParcelableExtra ( EXTRA_SUBSCRIBER_COMPONENT ) , intent . getStringExtra ( EXTRA_TOKEN ) ) ; } else if ( ACTION_HANDLE_COMMAND . equals ( action ) ) { int commandId = intent . getIntExtra ( EXTRA_COMMAND_ID , 0 ) ; processHandleCommand ( commandId , intent . getExtras ( ) ) ; } else if ( ACTION_NETWORK_AVAILABLE . equals ( action ) ) { processNetworkAvailable ( ) ; }
public class CheckpointEndLogRecord { /** * Gives back the serialized LogRecord as arrays of bytes . * @ return ObjectManagerByteArrayOutputStream [ ] the buffers containing the serialized LogRecord . */ public ObjectManagerByteArrayOutputStream [ ] getBuffers ( ) throws ObjectManagerException { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "getBuffers" ) ; ObjectManagerByteArrayOutputStream [ ] buffers = new ObjectManagerByteArrayOutputStream [ 1 ] ; // Create the buffer to contain the header for this log record . buffers [ 0 ] = new ObjectManagerByteArrayOutputStream ( 4 ) ; buffers [ 0 ] . writeInt ( LogRecord . TYPE_CHECKPOINT_END ) ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "getBuffers" , new Object [ ] { buffers } ) ; return buffers ;
public class JBBPDslBuilder { /** * Create anonymous custom type array with fixed size . * @ param type custom type , must not be null * @ param size expression to calculate size of the array , must not be null . * @ return the builder instance , must not be null */ public JBBPDslBuilder CustomArray ( final String type , final String size ) { } }
return this . CustomArray ( type , null , size , null ) ;
public class br { /** * < pre > * Use this operation to modify Repeater Instance . * < / pre > */ public static br update ( nitro_service client , br resource ) throws Exception { } }
resource . validate ( "modify" ) ; return ( ( br [ ] ) resource . update_resource ( client ) ) [ 0 ] ;
public class FFMQConnectionFactory { /** * / * ( non - Javadoc ) * @ see javax . naming . Referenceable # getReference ( ) */ @ Override public final Reference getReference ( ) throws NamingException { } }
Reference ref = new Reference ( getClass ( ) . getName ( ) , JNDIObjectFactory . class . getName ( ) , null ) ; ref . add ( new StringRefAddr ( "providerURL" , getProviderURL ( ) ) ) ; if ( clientID != null ) ref . add ( new StringRefAddr ( "clientID" , clientID ) ) ; return ref ;
public class SoyFileSet { /** * Extracts all messages from this Soy file set and writes the messages to an output sink . * @ param msgBundleHandler Handler to write the messages . * @ param options Options to configure how to write the extracted messages . * @ param output Where to write the extracted messages . * @ throws IOException If there are errors writing to the output . */ public void extractAndWriteMsgs ( SoyMsgBundleHandler msgBundleHandler , OutputFileOptions options , ByteSink output ) throws IOException { } }
resetErrorReporter ( ) ; SoyMsgBundle bundle = doExtractMsgs ( ) ; msgBundleHandler . writeExtractedMsgs ( bundle , options , output , errorReporter ) ; throwIfErrorsPresent ( ) ; reportWarnings ( ) ;
public class MetadataService { /** * Removes a { @ link RepositoryMetadata } of the specified { @ code repoName } from the specified * { @ code projectName } . */ public CompletableFuture < Revision > removeRepo ( Author author , String projectName , String repoName ) { } }
requireNonNull ( author , "author" ) ; requireNonNull ( projectName , "projectName" ) ; requireNonNull ( repoName , "repoName" ) ; final JsonPointer path = JsonPointer . compile ( "/repos" + encodeSegment ( repoName ) + "/removal" ) ; final Change < JsonNode > change = Change . ofJsonPatch ( METADATA_JSON , asJsonArray ( new TestAbsenceOperation ( path ) , new AddOperation ( path , Jackson . valueToTree ( UserAndTimestamp . of ( author ) ) ) ) ) ; final String commitSummary = "Remove the repo '" + repoName + "' from the project " + projectName ; return metadataRepo . push ( projectName , Project . REPO_DOGMA , author , commitSummary , change ) ;
public class EventImpl {
    /**
     * Fire an event containing a value if the trigger condition is valid.
     *
     * @param eventSocket socket used to publish the event
     * @throws DevFailed if updating the trigger or sending the event fails
     */
    protected void pushAttributeValueEvent(ZMQ.Socket eventSocket) throws DevFailed {
        xlogger.entry();
        // Clear any stale error and refresh the trigger state before deciding
        // whether this event should actually be fired.
        eventTrigger.setError(null);
        eventTrigger.updateProperties();
        if (isSendEvent()) {
            sendAttributeValueEvent(eventSocket);
        }
        xlogger.exit();
    }
}
public class ObjectOriginIdentifierImpl {
    /**
     * EMF reflective setter: routes a feature ID to the matching typed setter.
     * Unknown feature IDs are delegated to the superclass implementation.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
        case AfplibPackage.OBJECT_ORIGIN_IDENTIFIER__SYSTEM:
            setSystem((Integer) newValue);
            return;
        case AfplibPackage.OBJECT_ORIGIN_IDENTIFIER__SYS_ID:
            setSysID((byte[]) newValue);
            return;
        case AfplibPackage.OBJECT_ORIGIN_IDENTIFIER__MED_ID:
            setMedID((byte[]) newValue);
            return;
        case AfplibPackage.OBJECT_ORIGIN_IDENTIFIER__DSID:
            setDSID((byte[]) newValue);
            return;
        }
        super.eSet(featureID, newValue);
    }
}
public class CmsContainerpageController { /** * Schedules an update of the gallery data according to the current element view and the editable containers . < p > */ protected void scheduleGalleryUpdate ( ) { } }
// only if not already scheduled if ( m_galleryUpdateTimer == null ) { m_galleryUpdateTimer = new Timer ( ) { @ Override public void run ( ) { m_galleryUpdateTimer = null ; updateGalleryData ( false , null ) ; } } ; m_galleryUpdateTimer . schedule ( 50 ) ; }
public class TomcatBoot { protected void registerShutdownHook ( ) { } }
if ( suppressShutdownHook ) { return ; } final File markFile = prepareMarkFile ( ) ; final long lastModified = markFile . lastModified ( ) ; final String exp = new SimpleDateFormat ( "yyyy/MM/dd HH:mm:ss.SSS" ) . format ( new Date ( lastModified ) ) ; info ( "...Registering the shutdown hook for the Tomcat: lastModified=" + exp ) ; new Thread ( ( ) -> { while ( true ) { if ( needsShutdown ( markFile , lastModified ) ) { shutdownForcedly ( ) ; break ; } waitForNextShuwdownHook ( ) ; } } ) . start ( ) ;
public class PactDslRequestWithPath { /** * Match a request header . * @ param header Header to match * @ param regex Regular expression to match * @ param headerExample Example value to use */ public PactDslRequestWithPath matchHeader ( String header , String regex , String headerExample ) { } }
requestMatchers . addCategory ( "header" ) . setRule ( header , new RegexMatcher ( regex ) ) ; requestHeaders . put ( header , Collections . singletonList ( headerExample ) ) ; return this ;
public class Ifc2x3tc1PackageImpl {
    /**
     * Lazily resolves the EClass for IfcRectangularPyramid from the registered
     * package metamodel; index 428 is its generated classifier position.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcRectangularPyramid() {
        if (ifcRectangularPyramidEClass == null) {
            ifcRectangularPyramidEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(428);
        }
        return ifcRectangularPyramidEClass;
    }
}
public class LasUtils { /** * Distance between two points . * @ param r1 the first point . * @ param r2 the second point . * @ return the 3D distance . */ public static double distance3D ( LasRecord r1 , LasRecord r2 ) { } }
double deltaElev = Math . abs ( r1 . z - r2 . z ) ; double projectedDistance = NumericsUtilities . pythagoras ( r1 . x - r2 . x , r1 . y - r2 . y ) ; double distance = NumericsUtilities . pythagoras ( projectedDistance , deltaElev ) ; return distance ;
public class MetaClassImpl { /** * Adds an instance method to this metaclass . * @ param method The method to be added */ public void addNewInstanceMethod ( Method method ) { } }
final CachedMethod cachedMethod = CachedMethod . find ( method ) ; NewInstanceMetaMethod newMethod = new NewInstanceMetaMethod ( cachedMethod ) ; final CachedClass declaringClass = newMethod . getDeclaringClass ( ) ; addNewInstanceMethodToIndex ( newMethod , metaMethodIndex . getHeader ( declaringClass . getTheClass ( ) ) ) ;
public class TupleGenerator {
    /**
     * Returns the set of tuples that could satisfy conditions required by the given test case.
     *
     * @param testCase    test case whose unsatisfied required conditions must be covered
     * @param varTupleSet used to rank candidate bindings by their current usage
     * @return an iterator over candidate tuples, one per feasible combination of
     *         satisfying bindings (one binding set per unsatisfied disjunct)
     */
    private Iterator<Tuple> getSatisfyingTuples(final TestCaseDef testCase, VarTupleSet varTupleSet) {
        final Comparator<VarBindingDef> byUsage = byUsage(varTupleSet);
        return IteratorUtils.transformedIterator(
            // Iterate over all combinations of bindings...
            new CartesianProduct<VarBindingDef>(
                // ...combining members from all sets of bindings...
                new ArrayList<Set<VarBindingDef>>(
                    CollectionUtils.collect(
                        // ...where each set of bindings is derived from a disjunct of unsatisfied test case conditions...
                        testCase.getRequired().getDisjuncts(),
                        // ...and contains the set of compatible bindings that could satisfy this disjunct...
                        disjunct -> {
                            // Properties this disjunct still needs provided.
                            Set<String> unsatisfied = CollectionUtils.collect(disjunct.getAssertions(), assertion -> assertion.getProperty(), new HashSet<String>());
                            // Candidate bindings that provide them and are compatible with the test case so far.
                            Iterator<VarBindingDef> satisfyingBindings = IteratorUtils.filteredIterator(getPropertyProviders(unsatisfied).iterator(), binding -> testCase.isCompatible(binding));
                            // ...arranging satisfying bindings in order of decreasing preference...
                            return CollectionUtils.collect(satisfyingBindings, nopTransformer(), new TreeSet<VarBindingDef>(byUsage));
                        },
                        // ...arranging sets in a well-defined order for repeatable combinations...
                        new TreeSet<Set<VarBindingDef>>(varBindingSetSorter_))),
                // ...ignoring any infeasible combinations...
                bindings -> Tuple.of(bindings) != null),
            // ...forming each combination of satisfying bindings into a tuple...
            Tuple::of);
    }
}
public class KMLGeometry { /** * A geographic location defined by longitude , latitude , and ( optional ) * altitude . * Syntax : * < Point id = " ID " > * < ! - - specific to Point - - > * < extrude > 0 < / extrude > < ! - - boolean - - > * < altitudeMode > clampToGround < / altitudeMode > * < ! - - kml : altitudeModeEnum : clampToGround , relativeToGround , or absolute * < ! - - or , substitute gx : altitudeMode : clampToSeaFloor , relativeToSeaFloor * < coordinates > . . . < / coordinates > < ! - - lon , lat [ , alt ] - - > * < / Point > * Supported syntax : * < Point > * < extrude > 0 < / extrude > * < altitudeMode > clampToGround < / altitudeMode > * < coordinates > . . . < / coordinates > < ! - - lon , lat [ , alt ] - - > * < / Point > * @ param point * @ param extrude * @ param altitudeModeEnum */ public static void toKMLPoint ( Point point , ExtrudeMode extrude , int altitudeModeEnum , StringBuilder sb ) { } }
sb . append ( "<Point>" ) ; appendExtrude ( extrude , sb ) ; appendAltitudeMode ( altitudeModeEnum , sb ) ; sb . append ( "<coordinates>" ) ; Coordinate coord = point . getCoordinate ( ) ; sb . append ( coord . x ) . append ( "," ) . append ( coord . y ) ; if ( ! Double . isNaN ( coord . z ) ) { sb . append ( "," ) . append ( coord . z ) ; } sb . append ( "</coordinates>" ) . append ( "</Point>" ) ;
public class AbstractJaxb {
    /**
     * Delete all descendant elements that are of the specified tag type.
     *
     * @param tagType class of the descendant tags to remove
     * @param <T>     tag type bound
     */
    @SuppressWarnings("unchecked")
    public <T extends AbstractJaxb> void removeDescendants(Class<T> tagType) {
        // The helper walks this element's subtree and removes matching descendants.
        RemoveDescendantsUtil.removeDescendants((T) this, tagType);
    }
}
public class Chorus { /** * to get the suite name we concatenate all the values provided for suite name switch */ public String getSuiteName ( ) { } }
return configReader . isSet ( ChorusConfigProperty . SUITE_NAME ) ? concatenateName ( configReader . getValues ( ChorusConfigProperty . SUITE_NAME ) ) : "" ;
public class JavaUtil {
    /**
     * Getting last elements of list in reverse order.
     *
     * @param elements source list
     * @param limit    maximum elements count; values &lt;= 0 yield an empty list
     * @param <T>      type of objects
     * @return a new mutable list containing at most {@code limit} trailing
     *         elements of {@code elements}, last element first
     */
    public static <T> List<T> last(List<T> elements, int limit) {
        final int size = elements.size();
        // Clamp to [0, size] so both limit <= 0 and limit > size are handled,
        // and presize the result instead of growing it element by element.
        final int count = Math.max(0, Math.min(limit, size));
        final List<T> result = new ArrayList<T>(count);
        for (int i = size - 1; i >= size - count; i--) {
            result.add(elements.get(i));
        }
        return result;
    }
}
public class Optional { /** * If a value is present , apply the provided mapping function to it , * and if the result is non - null , return an { @ code Optional } describing the * result . Otherwise return an empty { @ code Optional } . * @ apiNote This method supports post - processing on optional values , without * the need to explicitly check for a return status . For example , the * following code traverses a stream of file names , selects one that has * not yet been processed , and then opens that file , returning an * { @ code Optional < FileInputStream > } : * < pre > { @ code * Optional < FileInputStream > fis = * names . stream ( ) . filter ( name - > ! isProcessedYet ( name ) ) * . findFirst ( ) * . map ( name - > new FileInputStream ( name ) ) ; * } < / pre > * Here , { @ code findFirst } returns an { @ code Optional < String > } , and then * { @ code map } returns an { @ code Optional < FileInputStream > } for the desired * file if one exists . * @ param < U > The type of the result of the mapping function * @ param mapper a mapping function to apply to the value , if present * @ return an { @ code Optional } describing the result of applying a mapping * function to the value of this { @ code Optional } , if a value is present , * otherwise an empty { @ code Optional } * @ throws NullPointerException if the mapping function is null */ public < U > Optional < U > map ( Function < ? super T , ? extends U > mapper ) { } }
Objects . requireNonNull ( mapper ) ; if ( ! isPresent ( ) ) return empty ( ) ; else { return Optional . ofNullable ( mapper . apply ( value ) ) ; }
public class ZmqEventConsumer {
    /**
     * Ensures an event channel connection exists for the given device,
     * creating it if necessary and failing when the channel cannot be set up.
     */
    @Override
    protected void checkDeviceConnection(DeviceProxy deviceProxy, String attribute, DeviceData deviceData, String event_name) throws DevFailed {
        // Check if address is coherent (??)
        deviceData = checkZmqAddress(deviceData, deviceProxy);
        final String deviceName = deviceProxy.fullName();
        ApiUtil.printTrace("checkDeviceConnection for " + deviceName);
        if (device_channel_map.containsKey(deviceName)) {
            // Channel already established: just (re-)wire this event.
            ApiUtil.printTrace(deviceName + " already connected.");
            ZMQutils.connectEvent(deviceProxy.get_tango_host(), deviceName, attribute, deviceData.extractLongStringArray(), event_name, false);
            return;
        }
        ApiUtil.printTrace(" Does NOT Exist");
        connect(deviceProxy, attribute, event_name, deviceData);
        // connect() must have registered the channel; otherwise report failure.
        if (!device_channel_map.containsKey(deviceName)) {
            Except.throw_event_system_failed("API_NotificationServiceFailed", "Failed to connect to event channel for device", "EventConsumer.subscribe_event()");
        }
    }
}
public class SelectAlgorithmAndInputPanel { /** * Specifies an image which contains the original input image . After this has been called the * view input image widget is activated and when selected this image will be displayed instead * of the main GUI . This functionality is optional . * @ param image Original input image . */ public void setInputImage ( BufferedImage image ) { } }
inputImage = image ; SwingUtilities . invokeLater ( new Runnable ( ) { public void run ( ) { if ( inputImage == null ) { originalCheck . setEnabled ( false ) ; } else { originalCheck . setEnabled ( true ) ; origPanel . setImage ( inputImage ) ; origPanel . setPreferredSize ( new Dimension ( inputImage . getWidth ( ) , inputImage . getHeight ( ) ) ) ; origPanel . repaint ( ) ; } } } ) ;
public class RePatternManager { /** * Pattern containing regular expression is finalized , i . e . , created correctly and added to hmAllRePattern . * @ param name key name * @ param rePattern repattern value */ private void finalizeRePattern ( String name , String rePattern ) { } }
// create correct regular expression rePattern = rePattern . replaceFirst ( "\\|" , "" ) ; /* this was added to reduce the danger of getting unusable groups from user - made repattern * files with group - producing parentheses ( i . e . " ( foo | bar ) " while matching against the documents . */ rePattern = rePattern . replaceAll ( "\\(([^\\?])" , "(?:$1" ) ; rePattern = "(" + rePattern + ")" ; rePattern = rePattern . replaceAll ( "\\\\" , "\\\\\\\\" ) ; // add rePattern to hmAllRePattern hmAllRePattern . put ( name , rePattern ) ;
public class RegisterWebAppVisitorHS { /** * Creates an instance of a class from class name . * @ param clazz class of the required object * @ param classLoader class loader to use to load the class * @ param className class name for the object to create * @ return created object * @ throws NullArgumentException if any of the parameters is null * @ throws ClassNotFoundException re - thrown * @ throws IllegalAccessException re - thrown * @ throws InstantiationException re - thrown */ public static < T > T newInstance ( final Class < T > clazz , final ClassLoader classLoader , final String className ) throws ClassNotFoundException , IllegalAccessException , InstantiationException { } }
return loadClass ( clazz , classLoader , className ) . newInstance ( ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link String } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.ibm.com/websphere/wim" , name = "l" ) public JAXBElement < String > createL ( String value ) { } }
return new JAXBElement < String > ( _L_QNAME , String . class , null , value ) ;
public class ConstantPool { /** * Adds a long constant . */ public LongConstant addLong ( long value ) { } }
LongConstant entry = getLongByValue ( value ) ; if ( entry != null ) return entry ; entry = new LongConstant ( this , _entries . size ( ) , value ) ; addConstant ( entry ) ; addConstant ( null ) ; return entry ;
public class EventManager { /** * Triggers an event through the EventManager . This will call the registered EventObservers with the provided * event . * @ param event object */ public void trigger ( Object event ) { } }
Set < Class > eventTypes = getAllInheritedClasses ( event . getClass ( ) ) ; for ( Class eventType : eventTypes ) { if ( observers . containsKey ( eventType ) ) { for ( EventObserver eventObserver : observers . get ( eventType ) ) { executionQueue . get ( ) . add ( new EventExecution ( event , eventObserver ) ) ; } } } triggerQueue ( ) ;
public class PropertiesUtil {
    /**
     * Resolve the values of properties if any contains ${...}.
     * <p>
     * Each pass substitutes one {@code ${name}} occurrence with the current
     * value of that property (or "" when undefined) and restarts the scan;
     * resolution stops when a pass makes no change. Substitutions that leave a
     * value unchanged (e.g. the self-reference {@code a=${a}}) are skipped and
     * the total number of passes is capped, so cyclic definitions — which made
     * the previous implementation loop forever or grow values without bound —
     * now terminate with a best-effort result.
     *
     * @param properties map whose values are resolved in place
     */
    public static void resolve(Map<String, String> properties) {
        // Generous upper bound on passes; prevents unbounded growth on mutually
        // recursive definitions such as a=${b}x, b=${a}y.
        int remainingPasses = properties.size() * 10 + 100;
        boolean resolved = true;
        while (resolved && remainingPasses-- > 0) {
            resolved = false;
            for (Map.Entry<String, String> entry : properties.entrySet()) {
                String value = entry.getValue();
                if (value == null) {
                    continue;
                }
                int start = value.indexOf("${");
                if (start < 0) {
                    continue;
                }
                int end = value.indexOf('}', start + 2);
                if (end < 0) {
                    continue;
                }
                String name = value.substring(start + 2, end);
                String replacement = properties.get(name);
                if (replacement == null) {
                    replacement = "";
                }
                String newValue = value.substring(0, start) + replacement + value.substring(end + 1);
                // A substitution that does not change the value (self-reference)
                // would previously be re-applied forever; skip it instead.
                if (newValue.equals(value)) {
                    continue;
                }
                properties.put(entry.getKey(), newValue);
                resolved = true;
                break;
            }
        }
    }
}
public class JDALogger { /** * Utility function to enable logging of complex statements more efficiently ( lazy ) . * @ param lazyLambda * The Supplier used when evaluating the expression * @ return An Object that can be passed to SLF4J ' s logging methods as lazy parameter */ public static Object getLazyString ( LazyEvaluation lazyLambda ) { } }
return new Object ( ) { @ Override public String toString ( ) { try { return lazyLambda . getString ( ) ; } catch ( Exception ex ) { StringWriter sw = new StringWriter ( ) ; ex . printStackTrace ( new PrintWriter ( sw ) ) ; return "Error while evaluating lazy String... " + sw . toString ( ) ; } } } ;
public class Type { /** * Checks if the element is assignable to a variable of the desired type . * @ param match current pattern match * @ param ind mapped indices * @ return true if the element is assignable to a variable of the desired type */ @ Override public boolean satisfies ( Match match , int ... ind ) { } }
assert ind . length == 1 ; return clazz . isAssignableFrom ( match . get ( ind [ 0 ] ) . getModelInterface ( ) ) ;
public class DeactivateUsers { /** * Runs the example . * @ param adManagerServices the services factory . * @ param session the session . * @ param userId the ID of the user to deactivate . * @ throws ApiException if the API request failed with one or more service errors . * @ throws RemoteException if the API request failed due to other errors . */ public static void runExample ( AdManagerServices adManagerServices , AdManagerSession session , long userId ) throws RemoteException { } }
// Get the UserService . UserServiceInterface userService = adManagerServices . get ( session , UserServiceInterface . class ) ; // Create a statement to select a user . StatementBuilder statementBuilder = new StatementBuilder ( ) . where ( "WHERE id = :id" ) . orderBy ( "id ASC" ) . limit ( StatementBuilder . SUGGESTED_PAGE_LIMIT ) . withBindVariableValue ( "id" , userId ) ; // Default for total result set size . int totalResultSetSize = 0 ; do { // Get users by statement . UserPage page = userService . getUsersByStatement ( statementBuilder . toStatement ( ) ) ; if ( page . getResults ( ) != null ) { totalResultSetSize = page . getTotalResultSetSize ( ) ; int i = page . getStartIndex ( ) ; for ( User user : page . getResults ( ) ) { System . out . printf ( "%d) User with ID %d will be deactivated.%n" , i ++ , user . getId ( ) ) ; } } statementBuilder . increaseOffsetBy ( StatementBuilder . SUGGESTED_PAGE_LIMIT ) ; } while ( statementBuilder . getOffset ( ) < totalResultSetSize ) ; System . out . printf ( "Number of users to be deactivated: %d%n" , totalResultSetSize ) ; if ( totalResultSetSize > 0 ) { // Remove limit and offset from statement . statementBuilder . removeLimitAndOffset ( ) ; // Create action . com . google . api . ads . admanager . axis . v201902 . DeactivateUsers action = new com . google . api . ads . admanager . axis . v201902 . DeactivateUsers ( ) ; // Perform action . UpdateResult result = userService . performUserAction ( action , statementBuilder . toStatement ( ) ) ; if ( result != null && result . getNumChanges ( ) > 0 ) { System . out . printf ( "Number of users deactivated: %d%n" , result . getNumChanges ( ) ) ; } else { System . out . println ( "No users were deactivated." ) ; } }
public class CreateDevEndpointResult { /** * The security groups assigned to the new DevEndpoint . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSecurityGroupIds ( java . util . Collection ) } or { @ link # withSecurityGroupIds ( java . util . Collection ) } if you * want to override the existing values . * @ param securityGroupIds * The security groups assigned to the new DevEndpoint . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateDevEndpointResult withSecurityGroupIds ( String ... securityGroupIds ) { } }
if ( this . securityGroupIds == null ) { setSecurityGroupIds ( new java . util . ArrayList < String > ( securityGroupIds . length ) ) ; } for ( String ele : securityGroupIds ) { this . securityGroupIds . add ( ele ) ; } return this ;
public class TopLevelDomainsInner {
    /**
     * Get details of a top-level domain.
     *
     * @param name Name of the top-level domain.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<TopLevelDomainInner> getAsync(String name, final ServiceCallback<TopLevelDomainInner> serviceCallback) {
        // Adapt the Observable-based service call into a ServiceFuture wired to the callback.
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(name), serviceCallback);
    }
}
public class ParallelTask { /** * Cancel on target hosts . * @ param targetHosts * the target hosts * @ return true , if successful */ @ SuppressWarnings ( "deprecation" ) public boolean cancelOnTargetHosts ( List < String > targetHosts ) { } }
boolean success = false ; try { switch ( state ) { case IN_PROGRESS : if ( executionManager != null && ! executionManager . isTerminated ( ) ) { executionManager . tell ( new CancelTaskOnHostRequest ( targetHosts ) , executionManager ) ; logger . info ( "asked task to stop from running on target hosts with count {}..." , targetHosts . size ( ) ) ; } else { logger . info ( "manager already killed or not exist.. NO OP" ) ; } success = true ; break ; case COMPLETED_WITHOUT_ERROR : case COMPLETED_WITH_ERROR : case WAITING : logger . info ( "will NO OP for cancelOnTargetHost as it is not in IN_PROGRESS state" ) ; success = true ; break ; default : break ; } } catch ( Exception e ) { logger . error ( "cancel task {} on hosts with count {} error with exception details " , this . getTaskId ( ) , targetHosts . size ( ) , e ) ; } return success ;
public class ClientMediaEntry { /** * Get media resource as an InputStream , should work regardless of whether you set the media * resource data as an InputStream or as a byte array . */ public InputStream getAsStream ( ) throws ProponoException { } }
if ( getContents ( ) != null && ! getContents ( ) . isEmpty ( ) ) { final Content c = getContents ( ) . get ( 0 ) ; if ( c . getSrc ( ) != null ) { return getResourceAsStream ( ) ; } else if ( inputStream != null ) { return inputStream ; } else if ( bytes != null ) { return new ByteArrayInputStream ( bytes ) ; } else { throw new ProponoException ( "ERROR: no src URI or binary data to return" ) ; } } else { throw new ProponoException ( "ERROR: no content found in entry" ) ; }
public class SuccessWithValue { /** * Create a new object with the given value . * @ param < DATATYPE > * The data type that is wrapped together with the success indicator * @ param aSuccessIndicator * The success indicator . May not be < code > null < / code > . * @ param aValue * The value to be used . May be < code > null < / code > . * @ return Never < code > null < / code > . */ @ Nonnull public static < DATATYPE > SuccessWithValue < DATATYPE > create ( @ Nonnull final ISuccessIndicator aSuccessIndicator , @ Nullable final DATATYPE aValue ) { } }
return new SuccessWithValue < > ( aSuccessIndicator , aValue ) ;
public class RScriptExporter { /** * Creates an R script file plotting multiple surfaces based on a data frame * generated with 1024 values in the scope of FldExporter : : AllVariables for * the two input variables * @ param file is the R script file * @ param engine is the engine to export * @ throws IOException if any error occurs during writing the file */ @ Override public void toFile ( File file , Engine engine ) throws IOException { } }
if ( engine . getInputVariables ( ) . isEmpty ( ) ) { throw new RuntimeException ( "[exporter error] engine has no input variables to export the surface" ) ; } if ( engine . getOutputVariables ( ) . isEmpty ( ) ) { throw new RuntimeException ( "[exporter error] engine has no output variables to export the surface" ) ; } InputVariable a = engine . getInputVariables ( ) . get ( 0 ) ; InputVariable b = engine . getInputVariables ( ) . get ( 1 % engine . numberOfInputVariables ( ) ) ; toFile ( file , engine , a , b , 1024 , FldExporter . ScopeOfValues . AllVariables , engine . getOutputVariables ( ) ) ;
public class BindTypeBuilder { /** * Generate method to parse xml stream . * @ param context * the context * @ param entity * kind of object to manage */ private static void generateParseOnXml ( BindTypeContext context , BindEntity entity ) { } }
// Emits the generated parseOnXml(xmlParser, currentEventType) method via a
// MethodSpec builder. Flow of the generated code: (1) create the instance
// (mutable POJO: new; immutable: init temp variables via ImmutableUtility);
// (2) prime the parser event (next() only when currentEventType == 0);
// (3) read attributes of the current tag; (4) loop over pull-parser events,
// dispatching START_TAG / END_TAG / CDSECT+TEXT to per-kind generator
// helpers; (5) for immutable entities, build the final instance; (6) return
// it. The exact statement order mirrors the structure of the emitted source,
// so do not reorder these builder calls.
MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( "parseOnXml" ) . addJavadoc ( "parse xml\n" ) . addAnnotation ( Override . class ) . addModifiers ( Modifier . PUBLIC ) // . addParameter ( typeName ( KriptonXmlContext . class ) , " context " ) . addParameter ( typeName ( XMLParser . class ) , "xmlParser" ) . addParameter ( typeName ( Integer . TYPE ) , "currentEventType" ) . returns ( typeName ( entity . getElement ( ) ) ) . addException ( Exception . class ) ; // @ formatter : on boolean mutableObject = entity . isMutablePojo ( ) ; if ( mutableObject ) { methodBuilder . addStatement ( "$T instance = new $T()" , entity . getElement ( ) , entity . getElement ( ) ) ; } else { ImmutableUtility . generateImmutableVariableInit ( entity , methodBuilder ) ; } methodBuilder . addStatement ( "int eventType = currentEventType" ) ; methodBuilder . addStatement ( "boolean read=true" ) ; methodBuilder . addCode ( "\n" ) ; methodBuilder . beginControlFlow ( "if (currentEventType == 0)" ) ; methodBuilder . addStatement ( "eventType = xmlParser.next()" ) ; methodBuilder . nextControlFlow ( "else" ) ; methodBuilder . addStatement ( "eventType = xmlParser.getEventType()" ) ; methodBuilder . endControlFlow ( ) ; methodBuilder . addStatement ( "String currentTag = xmlParser.getName().toString()" ) ; methodBuilder . addStatement ( "String elementName = currentTag" ) ; generateParseOnXmlAttributes ( context , methodBuilder , entity ) ; methodBuilder . addCode ( "\n" ) ; methodBuilder . addCode ( "//sub-elements\n" ) ; methodBuilder . beginControlFlow ( "while (xmlParser.hasNext() && elementName!=null)" ) ; methodBuilder . beginControlFlow ( "if (read)" ) ; methodBuilder . addStatement ( "eventType = xmlParser.next()" ) ; methodBuilder . nextControlFlow ( "else" ) ; methodBuilder . addStatement ( "eventType = xmlParser.getEventType()" ) ; methodBuilder . endControlFlow ( ) ; methodBuilder . addStatement ( "read=true" ) ; methodBuilder . 
beginControlFlow ( "switch(eventType)$>" ) ; methodBuilder . addCode ( "case $T.START_TAG:\n$>" , XmlPullParser . class ) ; generateParserOnXmlStartElement ( context , methodBuilder , "instance" , "xmlParser" , entity ) ; methodBuilder . addStatement ( "$<break" ) ; methodBuilder . addCode ( "case $T.END_TAG:\n$>" , XmlPullParser . class ) ; generateParserOnXmlEndElement ( context , methodBuilder , "instance" , "xmlParser" , entity ) ; methodBuilder . addStatement ( "$<break" ) ; methodBuilder . addCode ( "case $T.CDSECT:\n" , XmlPullParser . class ) ; methodBuilder . addCode ( "case $T.TEXT:\n$>" , XmlPullParser . class ) ; generateParserOnXmlCharacters ( context , methodBuilder , "instance" , "xmlParser" , entity ) ; methodBuilder . addStatement ( "$<break" ) ; methodBuilder . addCode ( "default:\n$>" ) ; methodBuilder . addStatement ( "$<break" ) ; methodBuilder . addCode ( "$<" ) ; methodBuilder . endControlFlow ( ) ; // @ formatter : on methodBuilder . endControlFlow ( ) ; if ( ! mutableObject ) { ImmutableUtility . generateImmutableEntityCreation ( entity , methodBuilder , "instance" , true ) ; } methodBuilder . addStatement ( "return instance" ) ; context . builder . addMethod ( methodBuilder . build ( ) ) ;
public class AbstractReaderModule { /** * Read a file and process it for list information . * @ param ref system path of the file to process * @ param parseFile file to parse , may be { @ code null } * @ throws DITAOTException if processing failed */ void readFile ( final Reference ref , final URI parseFile ) throws DITAOTException { } }
// Overall flow: (1) resolve source URI and a temp-dir output file; (2) ensure
// a Job FileInfo stub exists for the file; (3) build a SAX pipeline (parser ->
// per-file XMLFilters -> serializer) and parse the source into the output
// file; (4) on success, record parse results and categorize the file; on
// failure, behavior depends on whether this is the root file and on the
// processingMode (STRICT rethrows, otherwise the error is logged) and the
// file is added to failureList; (5) output of failed files is deleted and the
// file is finally recorded in doneList with the filters reset.
// NOTE(review): the catch ladder (SAXParseException / FileNotFoundException /
// Exception) is order-sensitive — do not reorder.
currentFile = ref . filename ; assert currentFile . isAbsolute ( ) ; final URI src = parseFile != null ? parseFile : currentFile ; assert src . isAbsolute ( ) ; final URI rel = tempFileNameScheme . generateTempFileName ( currentFile ) ; outputFile = new File ( job . tempDirURI . resolve ( rel ) ) ; final File outputDir = outputFile . getParentFile ( ) ; if ( ! outputDir . exists ( ) && ! outputDir . mkdirs ( ) ) { logger . error ( "Failed to create output directory " + outputDir . getAbsolutePath ( ) ) ; return ; } validateMap = Collections . emptyMap ( ) ; defaultValueMap = Collections . emptyMap ( ) ; logger . info ( "Processing " + currentFile + " to " + outputFile . toURI ( ) ) ; final String [ ] params = { currentFile . toString ( ) } ; // Verify stub for current file is in Job final FileInfo fi = job . getFileInfo ( currentFile ) ; if ( fi == null ) { final FileInfo stub = new FileInfo . Builder ( ) . src ( currentFile ) . uri ( rel ) . result ( currentFile ) . isInput ( currentFile . equals ( rootFile ) ) . build ( ) ; job . add ( stub ) ; } // InputSource in = null ; Result out = null ; try { final TransformerFactory tf = TransformerFactory . newInstance ( ) ; final SAXTransformerFactory stf = ( SAXTransformerFactory ) tf ; final TransformerHandler serializer = stf . newTransformerHandler ( ) ; XMLReader parser = getXmlReader ( ref . format ) ; XMLReader xmlSource = parser ; for ( final XMLFilter f : getProcessingPipe ( currentFile ) ) { f . setParent ( xmlSource ) ; f . setEntityResolver ( CatalogUtils . getCatalogResolver ( ) ) ; xmlSource = f ; } try { final LexicalHandler lexicalHandler = new DTDForwardHandler ( xmlSource ) ; parser . setProperty ( "http://xml.org/sax/properties/lexical-handler" , lexicalHandler ) ; parser . setFeature ( "http://xml.org/sax/features/lexical-handler" , true ) ; } catch ( final SAXNotRecognizedException e ) { } // in = new InputSource ( src . 
toString ( ) ) ; out = new StreamResult ( new FileOutputStream ( outputFile ) ) ; serializer . setResult ( out ) ; xmlSource . setContentHandler ( serializer ) ; xmlSource . parse ( src . toString ( ) ) ; if ( listFilter . isValidInput ( ) ) { processParseResult ( currentFile ) ; categorizeCurrentFile ( ref ) ; } else if ( ! currentFile . equals ( rootFile ) ) { logger . warn ( MessageUtils . getMessage ( "DOTJ021W" , params ) . toString ( ) ) ; failureList . add ( currentFile ) ; } } catch ( final RuntimeException e ) { throw e ; } catch ( final SAXParseException sax ) { final Exception inner = sax . getException ( ) ; if ( inner != null && inner instanceof DITAOTException ) { throw ( DITAOTException ) inner ; } if ( currentFile . equals ( rootFile ) ) { throw new DITAOTException ( MessageUtils . getMessage ( "DOTJ012F" , params ) . toString ( ) + ": " + sax . getMessage ( ) , sax ) ; } else if ( processingMode == Mode . STRICT ) { throw new DITAOTException ( MessageUtils . getMessage ( "DOTJ013E" , params ) . toString ( ) + ": " + sax . getMessage ( ) , sax ) ; } else { logger . error ( MessageUtils . getMessage ( "DOTJ013E" , params ) . toString ( ) + ": " + sax . getMessage ( ) , sax ) ; } failureList . add ( currentFile ) ; } catch ( final FileNotFoundException e ) { if ( ! exists ( currentFile ) ) { if ( currentFile . equals ( rootFile ) ) { throw new DITAOTException ( MessageUtils . getMessage ( "DOTA069F" , params ) . toString ( ) , e ) ; } else if ( processingMode == Mode . STRICT ) { throw new DITAOTException ( MessageUtils . getMessage ( "DOTX008E" , params ) . toString ( ) , e ) ; } else { logger . error ( MessageUtils . getMessage ( "DOTX008E" , params ) . toString ( ) ) ; } } else if ( currentFile . equals ( rootFile ) ) { throw new DITAOTException ( MessageUtils . getMessage ( "DOTJ078F" , params ) . toString ( ) + " Cannot load file: " + e . getMessage ( ) , e ) ; } else if ( processingMode == Mode . 
STRICT ) { throw new DITAOTException ( MessageUtils . getMessage ( "DOTJ079E" , params ) . toString ( ) + " Cannot load file: " + e . getMessage ( ) , e ) ; } else { logger . error ( MessageUtils . getMessage ( "DOTJ079E" , params ) . toString ( ) + " Cannot load file: " + e . getMessage ( ) ) ; } failureList . add ( currentFile ) ; } catch ( final Exception e ) { if ( currentFile . equals ( rootFile ) ) { throw new DITAOTException ( MessageUtils . getMessage ( "DOTJ012F" , params ) . toString ( ) + ": " + e . getMessage ( ) , e ) ; } else if ( processingMode == Mode . STRICT ) { throw new DITAOTException ( MessageUtils . getMessage ( "DOTJ013E" , params ) . toString ( ) + ": " + e . getMessage ( ) , e ) ; } else { logger . error ( MessageUtils . getMessage ( "DOTJ013E" , params ) . toString ( ) + ": " + e . getMessage ( ) , e ) ; } failureList . add ( currentFile ) ; } finally { if ( out != null ) { try { close ( out ) ; } catch ( final IOException e ) { logger . error ( e . getMessage ( ) , e ) ; } } if ( failureList . contains ( currentFile ) ) { FileUtils . deleteQuietly ( outputFile ) ; } } if ( ! listFilter . isValidInput ( ) && currentFile . equals ( rootFile ) ) { if ( validate ) { // stop the build if all content in the input file was filtered out . throw new DITAOTException ( MessageUtils . getMessage ( "DOTJ022F" , params ) . toString ( ) ) ; } else { // stop the build if the content of the file is not valid . throw new DITAOTException ( MessageUtils . getMessage ( "DOTJ034F" , params ) . toString ( ) ) ; } } doneList . add ( currentFile ) ; listFilter . reset ( ) ; keydefFilter . reset ( ) ;
public class Element { /** * Use the JavaXPath to determine if element is present . If not , then try * finding element . Return false if the element does not exist * @ return true or false * @ throws WidgetException */ private boolean isElementPresent_internal ( ) throws WidgetException { } }
try { try { final boolean isPotentiallyXpathWithLocator = ( locator instanceof EByFirstMatching ) || ( locator instanceof EByXpath ) ; if ( isPotentiallyXpathWithLocator && isElementPresentJavaXPath ( ) ) return true ; } catch ( Exception e ) { // Continue } findElement ( ) ; return true ; } catch ( NoSuchElementException e ) { return false ; }
public class AlbumActivity { /** * Select picture , from album . */ private void selectAlbum ( ) { } }
Album . album ( this ) . multipleChoice ( ) . columnCount ( 2 ) . selectCount ( 6 ) . camera ( true ) . cameraVideoQuality ( 1 ) . cameraVideoLimitDuration ( Integer . MAX_VALUE ) . cameraVideoLimitBytes ( Integer . MAX_VALUE ) . checkedList ( mAlbumFiles ) . widget ( Widget . newDarkBuilder ( this ) . title ( mToolbar . getTitle ( ) . toString ( ) ) . build ( ) ) . onResult ( new Action < ArrayList < AlbumFile > > ( ) { @ Override public void onAction ( @ NonNull ArrayList < AlbumFile > result ) { mAlbumFiles = result ; mAdapter . notifyDataSetChanged ( mAlbumFiles ) ; mTvMessage . setVisibility ( result . size ( ) > 0 ? View . VISIBLE : View . GONE ) ; } } ) . onCancel ( new Action < String > ( ) { @ Override public void onAction ( @ NonNull String result ) { Toast . makeText ( AlbumActivity . this , R . string . canceled , Toast . LENGTH_LONG ) . show ( ) ; } } ) . start ( ) ;
public class HBaseClient { /** * ( non - Javadoc ) . * @ param < E > * the element type * @ param entityClass * the entity class * @ param col * the col * @ return the list * @ see com . impetus . kundera . client . Client # find ( java . lang . Class , * java . util . Map ) */ @ Override public < E > List < E > find ( Class < E > entityClass , Map < String , String > col ) { } }
EntityMetadata entityMetadata = KunderaMetadataManager . getEntityMetadata ( kunderaMetadata , getPersistenceUnit ( ) , entityClass ) ; List < E > entities = new ArrayList < E > ( ) ; Map < String , Field > columnFamilyNameToFieldMap = MetadataUtils . createSuperColumnsFieldMap ( entityMetadata , kunderaMetadata ) ; for ( String columnFamilyName : col . keySet ( ) ) { String entityId = col . get ( columnFamilyName ) ; if ( entityId != null ) { E e = null ; try { List results = new ArrayList ( ) ; fetchEntity ( entityClass , entityId , entityMetadata , entityMetadata . getRelationNames ( ) , entityMetadata . getSchema ( ) , results , null , null ) ; if ( results != null ) { e = ( E ) results . get ( 0 ) ; } } catch ( IOException ioex ) { log . error ( "Error during find for embedded entities, Caused by: ." , ioex ) ; throw new KunderaException ( ioex ) ; } Field columnFamilyField = columnFamilyNameToFieldMap . get ( columnFamilyName . substring ( 0 , columnFamilyName . indexOf ( "|" ) ) ) ; Object columnFamilyValue = PropertyAccessorHelper . getObject ( e , columnFamilyField ) ; if ( Collection . class . isAssignableFrom ( columnFamilyField . getType ( ) ) ) { entities . addAll ( ( Collection ) columnFamilyValue ) ; } else { entities . add ( ( E ) columnFamilyValue ) ; } } } return entities ;
public class SelectSubPlanAssembler { /** * A method to filter out single - TVE expressions . * @ param expr List of single - TVE expressions . * @ param otherExprs List of multi - TVE expressions . * @ return List of single - TVE expressions from the input collection . */ private static List < AbstractExpression > filterSingleTVEExpressions ( List < AbstractExpression > exprs , List < AbstractExpression > otherExprs ) { } }
List < AbstractExpression > singleTVEExprs = new ArrayList < > ( ) ; for ( AbstractExpression expr : exprs ) { List < TupleValueExpression > tves = ExpressionUtil . getTupleValueExpressions ( expr ) ; if ( tves . size ( ) == 1 ) { singleTVEExprs . add ( expr ) ; } else { otherExprs . add ( expr ) ; } } return singleTVEExprs ;
public class Scheduler { /** * Initializes the scheduler by booting up Quartz */ public void initialize ( ) { } }
try { this . quartzScheduler = new StdSchedulerFactory ( ) . getScheduler ( ) ; this . quartzScheduler . setJobFactory ( Application . getInstance ( SchedulerFactory . class ) ) ; } catch ( final SchedulerException e ) { LOG . error ( "Failed to initialize scheduler" , e ) ; }
public class CFEndPointImpl { /** * Identify if one of the outbound channel definitions has a factory that * implements * the LocalChannelFactory . */ private void determineIsLocal ( ) { } }
// Loop through each channel in the outbound chain represented by this // endpoint int i = 0 ; for ( OutboundChannelDefinition def : this . outboundChannelDefs ) { if ( LocalChannelFactory . class . isAssignableFrom ( def . getOutboundFactory ( ) ) ) { // Found Local channel factory . if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Found LocalChannelFactory interface: " + def . getOutboundFactory ( ) ) ; } this . localChannelIndex = i ; break ; } i ++ ; }
public class SecStrucCalc { /** * Generate a summary of this SS prediction with information about * the three types of helix turns in different row sequences . * This is similar to the summary output of Jmol , and useful to visualize * the helix patterns . * @ return String helix summary */ public String printHelixSummary ( ) { } }
StringBuffer g = new StringBuffer ( ) ; // 3-10 helix StringBuffer h = new StringBuffer ( ) ; // alpha helix StringBuffer i = new StringBuffer ( ) ; // pi - helix StringBuffer ss = new StringBuffer ( ) ; // SS summary StringBuffer aa = new StringBuffer ( ) ; // AA one - letter String nl = System . getProperty ( "line.separator" ) ; g . append ( "3 turn: " ) ; h . append ( "4 turn: " ) ; i . append ( "5 turn: " ) ; ss . append ( "SS: " ) ; aa . append ( "AA: " ) ; for ( int k = 0 ; k < groups . length ; k ++ ) { SecStrucState state = getSecStrucState ( k ) ; g . append ( state . getTurn ( ) [ 0 ] ) ; h . append ( state . getTurn ( ) [ 1 ] ) ; i . append ( state . getTurn ( ) [ 2 ] ) ; ss . append ( state . getType ( ) ) ; aa . append ( StructureTools . get1LetterCode ( groups [ k ] . getPDBName ( ) ) ) ; } return g . toString ( ) + nl + h . toString ( ) + nl + i . toString ( ) + nl + ss . toString ( ) + nl + aa . toString ( ) ;
public class JavaUtils { /** * Closes the given object , ignoring IOExceptions . */ public static void closeQuietly ( Closeable closeable ) { } }
try { if ( closeable != null ) { closeable . close ( ) ; } } catch ( IOException e ) { logger . error ( "IOException should not have been thrown." , e ) ; }
public class Cell { /** * Adds { @ link # RIGHT } and clears { @ link # LEFT } for the alignment of the widget within the cell . */ public Cell < C , T > right ( ) { } }
if ( align == null ) align = RIGHT ; else { align |= RIGHT ; align &= ~ LEFT ; } return this ;
public class RemovePermissionRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param removePermissionRequest the request to serialize; must not be null
     * @param protocolMarshaller the protocol-level marshaller to write into
     */
    public void marshall(RemovePermissionRequest removePermissionRequest, ProtocolMarshaller protocolMarshaller) {
        // A null request is a caller programming error, not a transport issue.
        if (removePermissionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field through its static protocol binding; the
            // marshaller is expected to handle null field values itself.
            protocolMarshaller.marshall(removePermissionRequest.getFunctionName(), FUNCTIONNAME_BINDING);
            protocolMarshaller.marshall(removePermissionRequest.getStatementId(), STATEMENTID_BINDING);
            protocolMarshaller.marshall(removePermissionRequest.getQualifier(), QUALIFIER_BINDING);
            protocolMarshaller.marshall(removePermissionRequest.getRevisionId(), REVISIONID_BINDING);
        } catch (Exception e) {
            // Wrap any low-level failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BatchDeleteAttributesRequest { /** * A list of items on which to perform the operation . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setItems ( java . util . Collection ) } or { @ link # withItems ( java . util . Collection ) } if you want to override the * existing values . * @ param items * A list of items on which to perform the operation . * @ return Returns a reference to this object so that method calls can be chained together . */ public BatchDeleteAttributesRequest withItems ( DeletableItem ... items ) { } }
if ( this . items == null ) { setItems ( new com . amazonaws . internal . SdkInternalList < DeletableItem > ( items . length ) ) ; } for ( DeletableItem ele : items ) { this . items . add ( ele ) ; } return this ;
public class AnnotationUtils {
    /**
     * Returns all annotations of a class, also the annotations of the super
     * classes, implemented interfaces and the annotations that are present in
     * stereotype annotations. The stereotype annotation will not be included in
     * the annotation set.
     * Note that the set can contain annotation objects of the same type with
     * different values for their members.
     *
     * @param clazz the class from which to get the annotations
     * @return all annotations that are present for the given class
     */
    public static Set<Annotation> getAllAnnotations(Class<?> clazz) {
        // LinkedHashSet keeps discovery order while de-duplicating identical
        // annotation instances.
        Set<Annotation> annotationSet = new LinkedHashSet<Annotation>();
        // Worklist of annotation types still to be examined for stereotypes.
        List<Class<?>> annotationTypes = new ArrayList<Class<?>>();
        // Collect the direct annotations of the class and of every super type.
        for (Class<?> type : ReflectionUtils.getSuperTypes(clazz)) {
            Annotation[] annotations = type.getAnnotations();
            for (Annotation a : annotations) {
                annotationSet.add(a);
                annotationTypes.add(a.annotationType());
            }
        }
        // NOTE(review): stereotypeAnnotationClass is a field of this class —
        // presumably configurable; when unset, stereotype expansion is skipped.
        if (stereotypeAnnotationClass != null) {
            // Stack-style worklist (pop from the end) — transitively expands
            // stereotyped annotations until no new ones appear. Annotations
            // already processed are re-added to the worklist when rediscovered,
            // but the result set de-duplicates them.
            while (!annotationTypes.isEmpty()) {
                Class<?> annotationType = annotationTypes.remove(annotationTypes.size() - 1);
                if (annotationType.isAnnotationPresent(stereotypeAnnotationClass)) {
                    // The annotation is stereotyped: inherit its annotations.
                    for (Annotation annotation : annotationType.getAnnotations()) {
                        // Examine the inherited annotation for further stereotypes.
                        annotationTypes.add(annotation.annotationType());
                        if (!annotation.annotationType().equals(stereotypeAnnotationClass)) {
                            // The stereotype marker itself is excluded from the result.
                            annotationSet.add(annotation);
                        }
                    }
                }
            }
        }
        return annotationSet;
    }
}
public class ClasspathBuilder {
    /**
     * Add source path and resource paths of the project to the list of classpath items.
     *
     * @param items Classpath items.
     * @param sourceRoots Source-root paths to add.
     */
    private void addSources(final Collection<File> items, final Collection<String> sourceRoots) {
        // Wrap each source-root path in a File and append it to the classpath.
        for (final String sourceRoot : sourceRoots) {
            final File entry = new File(sourceRoot);
            items.add(entry);
        }
    }
}
public class QueryRecord {
    /**
     * Free the query record.
     *
     * Releases the parent state, all table links, and the record list.
     */
    public void free() {
        super.free(); // Free first in case you have to Update() the current record!
        // Drain the linkage list. NOTE(review): this loop assumes
        // TableLink.free() removes the link from m_LinkageList — otherwise it
        // would spin forever on element 0. TODO confirm against TableLink.
        while (m_LinkageList.size() > 0) {
            TableLink tableLink = (TableLink) m_LinkageList.elementAt(0);
            tableLink.free();
        }
        // Defensive clear, then drop the reference so the record cannot be reused.
        m_LinkageList.removeAllElements();
        m_LinkageList = null;
        m_vRecordList.free(); // Free all the records
        m_vRecordList = null;
    }
}
public class UserMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param user the model object to serialize; must not be null
     * @param protocolMarshaller the protocol-level marshaller to write into
     */
    public void marshall(User user, ProtocolMarshaller protocolMarshaller) {
        // A null model object is a caller programming error.
        if (user == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field through its static protocol binding; the
            // marshaller is expected to handle null field values itself.
            protocolMarshaller.marshall(user.getUserId(), USERID_BINDING);
            protocolMarshaller.marshall(user.getAccountId(), ACCOUNTID_BINDING);
            protocolMarshaller.marshall(user.getPrimaryEmail(), PRIMARYEMAIL_BINDING);
            protocolMarshaller.marshall(user.getPrimaryProvisionedNumber(), PRIMARYPROVISIONEDNUMBER_BINDING);
            protocolMarshaller.marshall(user.getDisplayName(), DISPLAYNAME_BINDING);
            protocolMarshaller.marshall(user.getLicenseType(), LICENSETYPE_BINDING);
            protocolMarshaller.marshall(user.getUserRegistrationStatus(), USERREGISTRATIONSTATUS_BINDING);
            protocolMarshaller.marshall(user.getUserInvitationStatus(), USERINVITATIONSTATUS_BINDING);
            protocolMarshaller.marshall(user.getRegisteredOn(), REGISTEREDON_BINDING);
            protocolMarshaller.marshall(user.getInvitedOn(), INVITEDON_BINDING);
            protocolMarshaller.marshall(user.getPersonalPIN(), PERSONALPIN_BINDING);
        } catch (Exception e) {
            // Wrap any low-level failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CPDefinitionInventoryPersistenceImpl { /** * Returns a range of all the cp definition inventories . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CPDefinitionInventoryModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of cp definition inventories * @ param end the upper bound of the range of cp definition inventories ( not inclusive ) * @ return the range of cp definition inventories */ @ Override public List < CPDefinitionInventory > findAll ( int start , int end ) { } }
return findAll ( start , end , null ) ;