signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class StandardExpressions { /** * Obtain the variable expression evaluator ( implementation of { @ link IStandardVariableExpressionEvaluator } ) * registered by the Standard Dialect that is being currently used . * Normally , there should be no need to obtain this object from the developers ' code ( only internally from * { @ link IStandardExpression } implementations ) . * @ param configuration the configuration object for the current template execution environment . * @ return the variable expression evaluator object . */ public static IStandardVariableExpressionEvaluator getVariableExpressionEvaluator ( final IEngineConfiguration configuration ) { } }
final Object expressionEvaluator = configuration . getExecutionAttributes ( ) . get ( STANDARD_VARIABLE_EXPRESSION_EVALUATOR_ATTRIBUTE_NAME ) ; if ( expressionEvaluator == null || ( ! ( expressionEvaluator instanceof IStandardVariableExpressionEvaluator ) ) ) { throw new TemplateProcessingException ( "No Standard Variable Expression Evaluator has been registered as an execution argument. " + "This is a requirement for using Standard Expressions, and might happen " + "if neither the Standard or the SpringStandard dialects have " + "been added to the Template Engine and none of the specified dialects registers an " + "attribute of type " + IStandardVariableExpressionEvaluator . class . getName ( ) + " with name " + "\"" + STANDARD_VARIABLE_EXPRESSION_EVALUATOR_ATTRIBUTE_NAME + "\"" ) ; } return ( IStandardVariableExpressionEvaluator ) expressionEvaluator ;
public class TSDataOptimizerTask { /** * Start creating the optimized file . This operation resets the state of the * optimizer task , so it can be re - used for subsequent invocations . * @ return A completeable future that yields the newly created file . */ public CompletableFuture < NewFile > run ( ) { } }
LOG . log ( Level . FINE , "starting optimized file creation for {0} files" , files . size ( ) ) ; CompletableFuture < NewFile > fileCreation = new CompletableFuture < > ( ) ; final List < TSData > fjpFiles = this . files ; // We clear out files below , which makes createTmpFile see an empty map if we don ' t use a separate variable . TASK_POOL . execute ( ( ) -> createTmpFile ( fileCreation , destDir , fjpFiles , getCompression ( ) ) ) ; synchronized ( OUTSTANDING ) { OUTSTANDING . add ( fileCreation ) ; } this . files = new LinkedList < > ( ) ; // Do not use clear ! This instance is now shared with the createTmpFile task . return fileCreation ;
public class FileComparer { /** * Reads a file and returns the content as List * @ param f * @ return * @ throws IOException */ public List < String > getFileLinesAsList ( File f ) throws IOException { } }
BufferedReader br = new BufferedReader ( new InputStreamReader ( new DataInputStream ( new FileInputStream ( f ) ) ) ) ; List < String > result = new LinkedList < String > ( ) ; String strLine ; while ( ( strLine = br . readLine ( ) ) != null ) { result . add ( strLine ) ; } br . close ( ) ; return result ;
public class PropertiesFileLoader {

    /**
     * Saves changes in the properties file. It reads the property file into memory,
     * modifies it, and saves it back to the file.
     * <p>
     * Lines are rewritten in their original order: blank lines are kept, lines that
     * parse as properties are rewritten with the pending value from {@code toSave}
     * (and removed from the pending map), and non-matching lines are written back
     * verbatim.
     *
     * @throws IOException if the file cannot be read or written
     */
    public synchronized void persistProperties() throws IOException {
        beginPersistence();
        // Read the properties file into memory — shouldn't be so bad, it's a small file.
        List<String> content = readFile(propertiesFile);
        BufferedWriter bw = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(propertiesFile), StandardCharsets.UTF_8));
        try {
            for (String line : content) {
                String trimmed = line.trim();
                if (trimmed.length() == 0) {
                    // Preserve blank lines as-is.
                    bw.newLine();
                } else {
                    Matcher matcher = PROPERTY_PATTERN.matcher(trimmed);
                    if (matcher.matches()) {
                        final String key = cleanKey(matcher.group(1));
                        if (toSave.containsKey(key) || toSave.containsKey(key + DISABLE_SUFFIX_KEY)) {
                            // Pending change for this key: rewrite it and drop it from the queue.
                            writeProperty(bw, key, matcher.group(2));
                            toSave.remove(key);
                            toSave.remove(key + DISABLE_SUFFIX_KEY);
                        } else if (trimmed.startsWith(COMMENT_PREFIX)) {
                            // Disabled (commented-out) property set by the user: keep verbatim.
                            write(bw, line, true);
                        }
                        // NOTE(review): a property line with no pending change and no comment
                        // prefix is silently dropped here — presumably intentional (removal
                        // of deleted properties), but worth confirming.
                    } else {
                        // Not a property line (e.g. a plain comment): keep verbatim.
                        write(bw, line, true);
                    }
                }
            }
            endPersistence(bw);
        } finally {
            safeClose(bw);
        }
    }
}
public class druidGLexer {

    /**
     * $ANTLR start "ID"
     * <p>
     * ANTLR-generated lexer rule: matches an identifier of the form
     * {@code ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'0'..'9'|'_')*}.
     * Generated code — do not hand-edit logic.
     */
    public final void mID() throws RecognitionException {
        try {
            int _type = ID;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // druidG.g:712:5: ( ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'0'..'9'|'_')* )
            // druidG.g:712:7: ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'0'..'9'|'_')*
            {
                // First character: letter or underscore, otherwise recover and rethrow.
                if ((input.LA(1) >= 'A' && input.LA(1) <= 'Z') || input.LA(1) == '_'
                        || (input.LA(1) >= 'a' && input.LA(1) <= 'z')) {
                    input.consume();
                } else {
                    MismatchedSetException mse = new MismatchedSetException(null, input);
                    recover(mse);
                    throw mse;
                }
                // druidG.g:712:31: ('a'..'z'|'A'..'Z'|'0'..'9'|'_')*
                // Remaining characters: letters, digits or underscore, zero or more.
                loop39: while (true) {
                    int alt39 = 2;
                    int LA39_0 = input.LA(1);
                    if (((LA39_0 >= '0' && LA39_0 <= '9') || (LA39_0 >= 'A' && LA39_0 <= 'Z')
                            || LA39_0 == '_' || (LA39_0 >= 'a' && LA39_0 <= 'z'))) {
                        alt39 = 1;
                    }
                    switch (alt39) {
                        case 1:
                            // druidG.g:
                            {
                                if ((input.LA(1) >= '0' && input.LA(1) <= '9')
                                        || (input.LA(1) >= 'A' && input.LA(1) <= 'Z')
                                        || input.LA(1) == '_'
                                        || (input.LA(1) >= 'a' && input.LA(1) <= 'z')) {
                                    input.consume();
                                } else {
                                    MismatchedSetException mse = new MismatchedSetException(null, input);
                                    recover(mse);
                                    throw mse;
                                }
                            }
                            break;
                        default:
                            // No more identifier characters: leave the loop.
                            break loop39;
                    }
                }
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class JsonHelper { /** * Return the field with name in JSON as a string , a boolean , a number or a node . * @ param json json * @ param name node name * @ return the field */ public static Object getElement ( final JsonNode json , final String name ) { } }
if ( json != null && name != null ) { JsonNode node = json ; for ( String nodeName : name . split ( "\\." ) ) { if ( node != null ) { if ( nodeName . matches ( "\\d+" ) ) { node = node . get ( Integer . parseInt ( nodeName ) ) ; } else { node = node . get ( nodeName ) ; } } } if ( node != null ) { if ( node . isNumber ( ) ) { return node . numberValue ( ) ; } else if ( node . isBoolean ( ) ) { return node . booleanValue ( ) ; } else if ( node . isTextual ( ) ) { return node . textValue ( ) ; } else if ( node . isNull ( ) ) { return null ; } else { return node ; } } } return null ;
public class ScriptService { /** * Return all scripts of a given type * @ param type integer value of type * @ return Array of scripts of the given type */ public Script [ ] getScripts ( Integer type ) { } }
ArrayList < Script > returnData = new ArrayList < > ( ) ; PreparedStatement statement = null ; ResultSet results = null ; try ( Connection sqlConnection = SQLService . getInstance ( ) . getConnection ( ) ) { statement = sqlConnection . prepareStatement ( "SELECT * FROM " + Constants . DB_TABLE_SCRIPT + " ORDER BY " + Constants . GENERIC_ID ) ; if ( type != null ) { statement = sqlConnection . prepareStatement ( "SELECT * FROM " + Constants . DB_TABLE_SCRIPT + " WHERE " + Constants . SCRIPT_TYPE + "= ?" + " ORDER BY " + Constants . GENERIC_ID ) ; statement . setInt ( 1 , type ) ; } logger . info ( "Query: {}" , statement ) ; results = statement . executeQuery ( ) ; while ( results . next ( ) ) { returnData . add ( scriptFromSQLResult ( results ) ) ; } } catch ( Exception e ) { } finally { try { if ( results != null ) { results . close ( ) ; } } catch ( Exception e ) { } try { if ( statement != null ) { statement . close ( ) ; } } catch ( Exception e ) { } } return returnData . toArray ( new Script [ 0 ] ) ;
public class ShallowEtagHeaderFilter { /** * Generate the ETag header value from the given response body byte array . * < p > The default implementation generates an MD5 hash . * @ param bytes the response bdoy as byte array * @ return the ETag header value * @ see org . springframework . util . DigestUtils */ protected String generateETagHeaderValue ( byte [ ] bytes ) { } }
StringBuilder builder = new StringBuilder ( "\"0" ) ; DigestUtils . appendMd5DigestAsHex ( bytes , builder ) ; builder . append ( '"' ) ; return builder . toString ( ) ;
public class BeanInterfaceProxy { /** * { @ inheritDoc } * If a getter method is encountered then this method returns the stored value from the bean state ( or null if the * field has not been set ) . * If a setter method is encountered then the bean state is updated with the value of the first argument and the * value is returned ( to allow for method chaining ) * @ throws IllegalArgumentException * if the method is not a valid getter / setter */ public Object invoke ( final Object proxy , final Method method , final Object [ ] args ) { } }
final String methodName = method . getName ( ) ; if ( methodName . startsWith ( GET_PREFIX ) ) { if ( method . getParameterTypes ( ) . length > 0 ) { throw new IllegalArgumentException ( String . format ( "method %s.%s() should have no parameters to be a valid getter" , method . getDeclaringClass ( ) . getName ( ) , methodName ) ) ; } // simulate getter by retrieving value from bean state return beanState . get ( methodName . substring ( GET_PREFIX . length ( ) ) ) ; } else if ( methodName . startsWith ( SET_PREFIX ) ) { if ( args == null || args . length != 1 ) { throw new IllegalArgumentException ( String . format ( "method %s.%s() should have exactly one parameter to be a valid setter" , method . getDeclaringClass ( ) . getName ( ) , methodName ) ) ; } // simulate setter by storing value in bean state beanState . put ( methodName . substring ( SET_PREFIX . length ( ) ) , args [ 0 ] ) ; return proxy ; } else { throw new IllegalArgumentException ( String . format ( "method %s.%s() is not a valid getter/setter" , method . getDeclaringClass ( ) . getName ( ) , methodName ) ) ; }
public class DerValue {

    /**
     * Returns an ASN.1 OCTET STRING.
     * <p>
     * Accepts either a primitive octet string or a constructed one; for the
     * constructed form, the nested octet strings are parsed recursively and
     * concatenated.
     *
     * @return the octet string held in this DER value
     * @throws IOException if the tag is not an octet string or the buffer is short
     */
    public byte[] getOctetString() throws IOException {
        byte[] bytes;
        // Reject unless the tag is a primitive OCTET STRING or a constructed one.
        if (tag != tag_OctetString && !isConstructed(tag_OctetString)) {
            throw new IOException("DerValue.getOctetString, not an Octet String: " + tag);
        }
        bytes = new byte[length];
        // Note: do not tempt to call buffer.read(bytes) at all. There's a
        // known bug that it returns -1 instead of 0.
        if (length == 0) {
            return bytes;
        }
        if (buffer.read(bytes) != length)
            throw new IOException("short read on DerValue buffer");
        if (isConstructed()) {
            // Constructed form: concatenate all nested octet strings.
            DerInputStream in = new DerInputStream(bytes);
            bytes = null;
            while (in.available() != 0) {
                bytes = append(bytes, in.getOctetString());
            }
        }
        return bytes;
    }
}
public class TableVersionErrorMarshaller {

    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param tableVersionError the object to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving each bound field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(TableVersionError tableVersionError, ProtocolMarshaller protocolMarshaller) {
        if (tableVersionError == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field through its pre-declared marshalling binding.
            protocolMarshaller.marshall(tableVersionError.getTableName(), TABLENAME_BINDING);
            protocolMarshaller.marshall(tableVersionError.getVersionId(), VERSIONID_BINDING);
            protocolMarshaller.marshall(tableVersionError.getErrorDetail(), ERRORDETAIL_BINDING);
        } catch (Exception e) {
            // Wrap with context; the cause is preserved.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class RaygunClient { /** * Initializes the Raygun client . This expects that you have placed the API key in your * AndroidManifest . xml , in a meta - data element . * @ param context The context of the calling Android activity . */ public static void init ( Context context ) { } }
String apiKey = readApiKey ( context ) ; init ( context , apiKey ) ;
public class FSAUtils { /** * Calculate fan - out ratio ( how many nodes have a given number of outgoing arcs ) . * @ param fsa The automaton to calculate fanout for . * @ param root The starting node for calculations . * @ return The returned map contains keys for the number of outgoing arcs and * an associated value being the number of nodes with that arc number . */ public static TreeMap < Integer , Integer > calculateFanOuts ( final FSA fsa , int root ) { } }
final int [ ] result = new int [ 256 ] ; fsa . visitInPreOrder ( new StateVisitor ( ) { public boolean accept ( int state ) { int count = 0 ; for ( int arc = fsa . getFirstArc ( state ) ; arc != 0 ; arc = fsa . getNextArc ( arc ) ) { count ++ ; } result [ count ] ++ ; return true ; } } ) ; TreeMap < Integer , Integer > output = new TreeMap < Integer , Integer > ( ) ; int low = 1 ; // Omit # 0 , there is always a single node like that ( dummy ) . while ( low < result . length && result [ low ] == 0 ) { low ++ ; } int high = result . length - 1 ; while ( high >= 0 && result [ high ] == 0 ) { high -- ; } for ( int i = low ; i <= high ; i ++ ) { output . put ( i , result [ i ] ) ; } return output ;
public class CmsDialog { /** * Builds the standard javascript for submitting the dialog . < p > * @ return the standard javascript for submitting the dialog */ @ Override public String dialogScriptSubmit ( ) { } }
if ( useNewStyle ( ) ) { return super . dialogScriptSubmit ( ) ; } StringBuffer result = new StringBuffer ( 512 ) ; result . append ( "function submitAction(actionValue, theForm, formName) {\n" ) ; result . append ( "\tif (theForm == null) {\n" ) ; result . append ( "\t\ttheForm = document.forms[formName];\n" ) ; result . append ( "\t}\n" ) ; result . append ( "\ttheForm." + PARAM_FRAMENAME + ".value = window.name;\n" ) ; result . append ( "\tif (actionValue == \"" + DIALOG_OK + "\") {\n" ) ; result . append ( "\t\treturn true;\n" ) ; result . append ( "\t}\n" ) ; result . append ( "\ttheForm." + PARAM_ACTION + ".value = actionValue;\n" ) ; result . append ( "\ttheForm.submit();\n" ) ; result . append ( "\treturn false;\n" ) ; result . append ( "}\n" ) ; return result . toString ( ) ;
public class HashSSORealm { public Credential getSingleSignOn ( HttpRequest request , HttpResponse response ) { } }
String ssoID = null ; Cookie [ ] cookies = request . getCookies ( ) ; for ( int i = 0 ; i < cookies . length ; i ++ ) { if ( cookies [ i ] . getName ( ) . equals ( SSO_COOKIE_NAME ) ) { ssoID = cookies [ i ] . getValue ( ) ; break ; } } if ( log . isDebugEnabled ( ) ) log . debug ( "get ssoID=" + ssoID ) ; Principal principal = null ; Credential credential = null ; synchronized ( _ssoId2Principal ) { principal = ( Principal ) _ssoId2Principal . get ( ssoID ) ; credential = ( Credential ) _ssoPrincipal2Credential . get ( principal ) ; } if ( log . isDebugEnabled ( ) ) log . debug ( "SSO principal=" + principal ) ; if ( principal != null && credential != null ) { if ( response . getHttpContext ( ) . getRealm ( ) . reauthenticate ( principal ) ) { request . setUserPrincipal ( principal ) ; request . setAuthUser ( principal . getName ( ) ) ; return credential ; } else { synchronized ( _ssoId2Principal ) { _ssoId2Principal . remove ( ssoID ) ; _ssoPrincipal2Credential . remove ( principal ) ; _ssoUsername2Id . remove ( principal . getName ( ) ) ; } } } return null ;
public class NavigationMapboxMap { /** * The maximum preferred frames per second at which to render the map . * This property only takes effect when the application has limited resources , such as when * the device is running on battery power . By default , this is set to 20fps . * Throttling will also only take effect when the camera is currently tracking * the user location . * @ param maxFpsThreshold to be used to limit map frames per second */ public void updateMapFpsThrottle ( int maxFpsThreshold ) { } }
if ( mapFpsDelegate != null ) { mapFpsDelegate . updateMaxFpsThreshold ( maxFpsThreshold ) ; } else { settings . updateMaxFps ( maxFpsThreshold ) ; }
public class CorcInputFormat {

    /**
     * Gets the StructTypeInfo that declares the columns to be read from the
     * configuration.
     *
     * @param conf configuration holding the serialized type info under INPUT_TYPE_INFO
     * @return the parsed struct type info describing the input columns
     */
    static StructTypeInfo getTypeInfo(Configuration conf) {
        // The type info is stored as a Hive type string and parsed back here.
        StructTypeInfo inputTypeInfo =
                (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(conf.get(INPUT_TYPE_INFO));
        LOG.debug("Got input typeInfo from conf: {}", inputTypeInfo);
        return inputTypeInfo;
    }
}
public class RunnersApi { /** * Get details of a runner . * < pre > < code > GitLab Endpoint : GET / runners / : id < / code > < / pre > * @ param runnerId Runner id to get details for * @ return RunnerDetail instance . * @ throws GitLabApiException if any exception occurs */ public RunnerDetail getRunnerDetail ( Integer runnerId ) throws GitLabApiException { } }
if ( runnerId == null ) { throw new RuntimeException ( "runnerId cannot be null" ) ; } Response response = get ( Response . Status . OK , null , "runners" , runnerId ) ; return ( response . readEntity ( RunnerDetail . class ) ) ;
public class Pack { /** * Pack the images provided * @ param images The list of sprite objects pointing at the images to be packed * @ param width The width of the sheet to be generated * @ param height The height of the sheet to be generated * @ param border The border between sprites * @ param out The file to write out to * @ return The generated sprite sheet * @ throws IOException Indicates a failure to write out files */ public Sheet packImages ( ArrayList images , int width , int height , int border , File out ) throws IOException { } }
Collections . sort ( images , new Comparator ( ) { public int compare ( Object o1 , Object o2 ) { Sprite a = ( Sprite ) o1 ; Sprite b = ( Sprite ) o2 ; int asize = a . getHeight ( ) ; int bsize = b . getHeight ( ) ; return bsize - asize ; } } ) ; int x = 0 ; int y = 0 ; BufferedImage result = new BufferedImage ( width , height , BufferedImage . TYPE_INT_ARGB ) ; Graphics g = result . getGraphics ( ) ; int rowHeight = 0 ; try { PrintStream pout = null ; if ( out != null ) { pout = new PrintStream ( new FileOutputStream ( new File ( out . getParentFile ( ) , out . getName ( ) + ".xml" ) ) ) ; pout . println ( "<sheet>" ) ; } for ( int i = 0 ; i < images . size ( ) ; i ++ ) { Sprite current = ( Sprite ) images . get ( i ) ; if ( x + current . getWidth ( ) > width ) { x = 0 ; y += rowHeight ; rowHeight = 0 ; } if ( rowHeight == 0 ) { rowHeight = current . getHeight ( ) + border ; } if ( out != null ) { pout . print ( "\t<sprite " ) ; pout . print ( "name=\"" + current . getName ( ) + "\" " ) ; pout . print ( "x=\"" + x + "\" " ) ; pout . print ( "y=\"" + y + "\" " ) ; pout . print ( "width=\"" + current . getWidth ( ) + "\" " ) ; pout . print ( "height=\"" + current . getHeight ( ) + "\" " ) ; pout . println ( "/>" ) ; } current . setPosition ( x , y ) ; g . drawImage ( current . getImage ( ) , x , y , null ) ; x += current . getWidth ( ) + border ; } g . dispose ( ) ; if ( out != null ) { pout . println ( "</sheet>" ) ; pout . close ( ) ; } } catch ( Exception e ) { e . printStackTrace ( ) ; IOException io = new IOException ( "Failed writing image XML" ) ; io . initCause ( e ) ; throw io ; } if ( out != null ) { try { ImageIO . write ( result , "PNG" , out ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; IOException io = new IOException ( "Failed writing image" ) ; io . initCause ( e ) ; throw io ; } } return new Sheet ( result , images ) ;
public class Selenified { /** * Sets any additional headers for the web services calls for each instance of the test suite being executed . * @ param clazz - the test suite class , used for making threadsafe storage of * application , allowing suites to have independent applications * under test , run at the same time * @ param context - the TestNG context associated with the test suite , used for * storing app url information * @ param servicesUser - the username required for authentication * @ param servicesPass - the password required for authentication */ protected static void setCredentials ( Selenified clazz , ITestContext context , String servicesUser , String servicesPass ) { } }
context . setAttribute ( clazz . getClass ( ) . getName ( ) + SERVICES_USER , servicesUser ) ; context . setAttribute ( clazz . getClass ( ) . getName ( ) + SERVICES_PASS , servicesPass ) ;
public class ContentSpec { /** * Sets the InjectionOptions that will be used by the Builder when building a book . * @ param injectionOptions The InjectionOptions to be used when building a book . */ public void setInjectionOptions ( final InjectionOptions injectionOptions ) { } }
if ( injectionOptions == null && this . injectionOptions == null ) { return ; } else if ( injectionOptions == null ) { removeChild ( this . injectionOptions ) ; this . injectionOptions = null ; } else if ( this . injectionOptions == null ) { this . injectionOptions = new KeyValueNode < InjectionOptions > ( CommonConstants . CS_INLINE_INJECTION_TITLE , injectionOptions ) ; appendChild ( this . injectionOptions , false ) ; } else { this . injectionOptions . setValue ( injectionOptions ) ; }
public class LineParser { /** * Returns an argument map fitting the given value key set ( using defined types ) . * @ param arguments input arguments to test arguments against * @ return argument map with correct value types */ public Map < SkbShellArgument , Object > getArgMap ( SkbShellArgument [ ] arguments ) { } }
Map < SkbShellArgument , Object > ret = new LinkedHashMap < SkbShellArgument , Object > ( ) ; if ( arguments != null ) { for ( Entry < String , String > entry : this . getArgMap ( ) . entrySet ( ) ) { for ( SkbShellArgument ssa : arguments ) { if ( ssa . getKey ( ) . equals ( entry . getKey ( ) ) ) { switch ( ssa . getType ( ) ) { case Boolean : ret . put ( ssa , Boolean . valueOf ( entry . getValue ( ) ) ) ; break ; case Double : ret . put ( ssa , Double . valueOf ( entry . getValue ( ) ) ) ; break ; case Integer : ret . put ( ssa , Integer . valueOf ( entry . getValue ( ) ) ) ; break ; case String : ret . put ( ssa , entry . getValue ( ) ) ; break ; case ListString : String [ ] ar = StringUtils . split ( entry . getValue ( ) , ';' ) ; if ( ar != null ) { List < String > val = new ArrayList < > ( ) ; for ( String s : ar ) { val . add ( s ) ; } ret . put ( ssa , val ) ; } break ; case ListInteger : String [ ] arInt = StringUtils . split ( entry . getValue ( ) , ';' ) ; if ( arInt != null ) { List < Integer > valInt = new ArrayList < > ( ) ; for ( String s : arInt ) { valInt . add ( Integer . valueOf ( s ) ) ; } ret . put ( ssa , valInt ) ; } break ; default : System . err . println ( "parser.getArgMap --> argument type not yet supported: " + ssa . getType ( ) ) ; // TODO do not use syserr prints break ; } } } } } return ret ;
public class ElementWithCardinalityImpl {

    /**
     * <!-- begin-user-doc -->
     * EMF-generated reflective feature accessor: returns the value of the feature
     * identified by {@code featureID}, delegating unknown ids to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case SimpleAntlrPackage.ELEMENT_WITH_CARDINALITY__ELEMENT:
                return getElement();
            case SimpleAntlrPackage.ELEMENT_WITH_CARDINALITY__CARDINALITY:
                return getCardinality();
        }
        // Feature not declared on this class: defer to the inherited implementation.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class FileExecutor { /** * 获取文件后缀 ( 包括点号 ) * @ param fileName 文件名 * @ return 后缀 * @ since 1.1.0 */ public static String getSuffix ( String fileName ) { } }
return fileName . substring ( fileName . lastIndexOf ( ValueConsts . DOT_SIGN ) ) ;
public class BaseMessageEndpointFactory {

    /**
     * Get the EJBMethodInfo object associated with a specified Method object.
     *
     * @param method - the target of this request.
     * @return EJBMethodInfo for target of this request. Note, a null reference is
     *         returned if Method is not a method of this EJB component.
     */
    private EJBMethodInfoImpl getEJBMethodInfo(Method method) {
        // Match by signature string against the registered MDB methods.
        final String targetSignature = MethodAttribUtils.methodSignature(method);
        for (EJBMethodInfoImpl candidate : ivMdbMethods) {
            if (targetSignature.equals(candidate.getMethodSignature())) {
                return candidate;
            }
        }
        // No registered method matched the target signature.
        return null;
    }
}
public class Agg {

    /**
     * Get a {@link Collector} that calculates the derived <code>RANK()</code>
     * function given a specific ordering: the number of collected elements whose
     * mapped value is strictly less than {@code value} (per {@code comparator}),
     * or {@link Optional#empty()} if no elements were collected.
     */
    public static <T, U> Collector<T, ?, Optional<Long>> rankBy(U value,
            Function<? super T, ? extends U> function, Comparator<? super U> comparator) {
        // acc[0] == -1 means "no elements seen yet"; otherwise it is the count of
        // elements strictly below the probe value.
        return Collector.of(
            () -> new long[] { -1L },
            (acc, element) -> {
                if (acc[0] == -1L) {
                    acc[0] = 0L;
                }
                if (comparator.compare(value, function.apply(element)) > 0) {
                    acc[0] = acc[0] + 1L;
                }
            },
            (left, right) -> {
                // Treat "unseen" partitions as zero when merging.
                left[0] = (left[0] == -1 ? 0L : left[0]) + (right[0] == -1 ? 0L : right[0]);
                return left;
            },
            acc -> acc[0] == -1 ? Optional.empty() : Optional.of(acc[0])
        );
    }
}
public class Metrics {

    /**
     * GZip-compress a string (encoded as UTF-8).
     * <p>
     * Best-effort: on an I/O error the partial buffer gathered so far is returned,
     * matching the original contract.
     *
     * @param input the string to compress
     * @return a byte array with the gzip-compressed UTF-8 bytes of {@code input}
     */
    public static byte[] gzip(String input) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // try-with-resources replaces the manual null-check/finally dance; closing
        // the gzip stream also finishes (flushes) the compressed trailer.
        try (GZIPOutputStream gzos = new GZIPOutputStream(baos)) {
            gzos.write(input.getBytes("UTF-8"));
        } catch (IOException e) {
            // Preserve the original best-effort behavior: report and fall through.
            e.printStackTrace();
        }
        return baos.toByteArray();
    }
}
public class AmazonRoute53Client {

    /**
     * Gets information about the traffic policy instances that you created by using
     * the current AWS account.
     * <note>
     * After you submit an <code>UpdateTrafficPolicyInstance</code> request, there's
     * a brief delay while Amazon Route 53 creates the resource record sets that are
     * specified in the traffic policy definition. For more information, see the
     * <code>State</code> response element.
     * </note>
     * Route 53 returns a maximum of 100 items in each response. If you have a lot
     * of traffic policy instances, you can use the <code>MaxItems</code> parameter
     * to list them in groups of up to 100.
     *
     * @param request
     *        A request to get information about the traffic policy instances that
     *        you created by using the current AWS account.
     * @return Result of the ListTrafficPolicyInstances operation returned by the service.
     * @throws InvalidInputException
     *         The input is not valid.
     * @throws NoSuchTrafficPolicyInstanceException
     *         No traffic policy instance exists with the specified ID.
     * @sample AmazonRoute53.ListTrafficPolicyInstances
     */
    @Override
    public ListTrafficPolicyInstancesResult listTrafficPolicyInstances(ListTrafficPolicyInstancesRequest request) {
        // Standard generated-client shape: run pre-execution hooks, then delegate.
        request = beforeClientExecution(request);
        return executeListTrafficPolicyInstances(request);
    }
}
public class XmlUtils {

    /**
     * Returns the attribute with the given name from the given node. If the
     * attribute cannot be obtained, the given default value is returned.
     *
     * @param node The node to obtain the attribute from
     * @param attributeName The name of the attribute
     * @param defaultValue The default value to return when the specified
     *        attribute could not be obtained
     * @return The value of the attribute, or the default value
     */
    public static String getAttributeValue(Node node, String attributeName, String defaultValue) {
        NamedNodeMap attributes = node.getAttributes();
        // Non-element nodes (text, comments, ...) return null here; the original
        // dereferenced the map unconditionally and threw NullPointerException.
        if (attributes == null) {
            return defaultValue;
        }
        Node attributeNode = attributes.getNamedItem(attributeName);
        if (attributeNode == null) {
            return defaultValue;
        }
        String value = attributeNode.getNodeValue();
        return value != null ? value : defaultValue;
    }
}
public class JDBDT {

    /**
     * Delete all data from a table, subject to a <code>WHERE</code> clause.
     *
     * @param table Table.
     * @param where <code>WHERE</code> clause
     * @param args <code>WHERE</code> clause arguments, if any.
     * @return Number of deleted entries.
     * @see #deleteAll(Table)
     * @see #truncate(Table)
     */
    @SafeVarargs
    public static int deleteAllWhere(Table table, String where, Object... args) {
        // Thin facade: delegate to the setup engine with fresh call-site info.
        return DBSetup.deleteAll(CallInfo.create(), table, where, args);
    }
}
public class AttributeEditorUtil { /** * creates a RepeatingView providing a suitable editor field for every attribute in the list . * @ param values map used for saving the data @ see org . openengsb . ui . common . wicket . model . MapModel */ public static RepeatingView createFieldList ( String id , List < AttributeDefinition > attributes , Map < String , String > values ) { } }
RepeatingView fields = new RepeatingView ( id ) ; for ( AttributeDefinition a : attributes ) { WebMarkupContainer row = new WebMarkupContainer ( a . getId ( ) ) ; MapModel < String , String > model = new MapModel < String , String > ( values , a . getId ( ) ) ; row . add ( createEditorField ( "row" , model , a ) ) ; fields . add ( row ) ; } return fields ;
public class ActionConfigurationMarshaller {

    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param actionConfiguration the object to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving the bound field
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ActionConfiguration actionConfiguration, ProtocolMarshaller protocolMarshaller) {
        if (actionConfiguration == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit the single field through its pre-declared marshalling binding.
            protocolMarshaller.marshall(actionConfiguration.getConfiguration(), CONFIGURATION_BINDING);
        } catch (Exception e) {
            // Wrap with context; the cause is preserved.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Zone { /** * Adds a Record to the Zone * @ param r The record to be added * @ see Record */ public void addRecord ( Record r ) { } }
Name name = r . getName ( ) ; int rtype = r . getRRsetType ( ) ; synchronized ( this ) { RRset rrset = findRRset ( name , rtype ) ; if ( rrset == null ) { rrset = new RRset ( r ) ; addRRset ( name , rrset ) ; } else { rrset . addRR ( r ) ; } }
public class CommonsLogger {

    /**
     * Delegates to the {@link Log#error(Object)} method of the underlying
     * {@link Log} instance.
     * <p>
     * However, this form avoids superfluous object creation when the logger is
     * disabled for level ERROR: the message is only formatted after the level
     * check passes.
     *
     * @param format the format string
     * @param arguments a list of 3 or more arguments
     */
    @Override
    public void error(String format, Object... arguments) {
        if (logger.isErrorEnabled()) {
            // arrayFormat also extracts a trailing Throwable, if one was passed.
            FormattingTuple ft = MessageFormatter.arrayFormat(format, arguments);
            logger.error(ft.getMessage(), ft.getThrowable());
        }
    }
}
public class MultiPolygon { /** * Create a new instance of this class by passing in a formatted valid JSON String . If you are * creating a MultiPolygon object from scratch it is better to use one of the other provided * static factory methods such as { @ link # fromPolygons ( List ) } . * @ param json a formatted valid JSON string defining a GeoJson MultiPolygon * @ return a new instance of this class defined by the values passed inside this static factory * method * @ since 1.0.0 */ public static MultiPolygon fromJson ( String json ) { } }
GsonBuilder gson = new GsonBuilder ( ) ; gson . registerTypeAdapterFactory ( GeoJsonAdapterFactory . create ( ) ) ; return gson . create ( ) . fromJson ( json , MultiPolygon . class ) ;
public class AbstractPrintQuery { /** * Get the object returned by the given select statement . * @ param < T > class the return value will be casted to * @ param _ selectBldr select bldr the object is wanted for * @ return object for the select statement * @ throws EFapsException on error */ @ SuppressWarnings ( "unchecked" ) public < T > T getSelect ( final SelectBuilder _selectBldr ) throws EFapsException { } }
final OneSelect oneselect = this . selectStmt2OneSelect . get ( _selectBldr . toString ( ) ) ; return oneselect == null ? null : ( T ) oneselect . getObject ( ) ;
public class SecondaryRecordConverter {
    /**
     * Get the data on the end of this converter chain.
     * Uses the upstream value as a lookup key into the secondary record and,
     * when the seek succeeds, returns the secondary data field's value.
     * @return The raw data, or null when the key is absent, the seek fails,
     *         or any exception occurs (the exception is printed, not rethrown).
     */
    public Object getData() {
        try {
            Object objValue = super.getData(); // Get the actual key data.
            if (objValue == null)
                objValue = m_strNullValue; // fall back to the configured "null" placeholder value
            if (objValue == null)
                return null; // no key available at all -> nothing to look up
            // Assign the key natively when the classes match, otherwise via its string form.
            if (objValue.getClass() == m_fieldKey.getDataClass())
                m_fieldKey.setData(objValue);
            else
                m_fieldKey.setString(objValue.toString());
            if (m_record.getTable().seek(null) == true) // I use get, so the index matches the index of JComboBox
                return m_fieldData.getData();
        } catch (Exception ex) {
            // NOTE(review): best-effort lookup — errors are printed and swallowed,
            // falling through to the null return below; consider a logger here.
            ex.printStackTrace();
        }
        return null;
    }
}
public class Positions {
    /**
     * Get the {@link Collection} of set (1) bits inside a word or long word.
     * @param i the word to scan
     * @return The not null {@link Collection} of the indexes of set bits, in
     *         ascending order. The {@link Collection} implementation is a
     *         {@link LinkedList}.
     */
    public static Collection<Integer> ofSetBits(long i) {
        final Collection<Integer> positions = new LinkedList<Integer>();
        long remaining = i;
        while (remaining != 0) {
            // Index of the lowest set bit still present.
            positions.add(Long.numberOfTrailingZeros(remaining));
            // Clear that lowest set bit and continue.
            remaining &= remaining - 1;
        }
        return positions;
    }
}
public class FIODataPoint { /** * Updates the data point data * @ param dp JsonObect with the data */ void update ( JsonObject dp ) { } }
for ( int i = 0 ; i < dp . names ( ) . size ( ) ; i ++ ) { datapoint . put ( dp . names ( ) . get ( i ) , dp . get ( dp . names ( ) . get ( i ) ) ) ; }
public class RedisInner { /** * Regenerate Redis cache ' s access keys . This operation requires write permission to the cache resource . * @ param resourceGroupName The name of the resource group . * @ param name The name of the Redis cache . * @ param keyType The Redis access key to regenerate . Possible values include : ' Primary ' , ' Secondary ' * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the RedisAccessKeysInner object */ public Observable < RedisAccessKeysInner > regenerateKeyAsync ( String resourceGroupName , String name , RedisKeyType keyType ) { } }
return regenerateKeyWithServiceResponseAsync ( resourceGroupName , name , keyType ) . map ( new Func1 < ServiceResponse < RedisAccessKeysInner > , RedisAccessKeysInner > ( ) { @ Override public RedisAccessKeysInner call ( ServiceResponse < RedisAccessKeysInner > response ) { return response . body ( ) ; } } ) ;
public class MoreExecutors { /** * Converts the given ScheduledThreadPoolExecutor into a * ScheduledExecutorService that exits when the application is complete . It * does so by using daemon threads and adding a shutdown hook to wait for * their completion . * < p > This is mainly for fixed thread pools . * See { @ link Executors # newScheduledThreadPool ( int ) } . * @ param executor the executor to modify to make sure it exits when the * application is finished * @ param terminationTimeout how long to wait for the executor to * finish before terminating the JVM * @ param timeUnit unit of time for the time parameter * @ return an unmodifiable version of the input which will not hang the JVM */ @ Beta public static ScheduledExecutorService getExitingScheduledExecutorService ( ScheduledThreadPoolExecutor executor , long terminationTimeout , TimeUnit timeUnit ) { } }
return new Application ( ) . getExitingScheduledExecutorService ( executor , terminationTimeout , timeUnit ) ;
public class TimerService { /** * Check whether the timeout for the given key and ticket is still valid ( not yet unregistered * and not yet overwritten ) . * @ param key for which to check the timeout * @ param ticket of the timeout * @ return True if the timeout ticket is still valid ; otherwise false */ public boolean isValid ( K key , UUID ticket ) { } }
if ( timeouts . containsKey ( key ) ) { Timeout < K > timeout = timeouts . get ( key ) ; return timeout . getTicket ( ) . equals ( ticket ) ; } else { return false ; }
public class XbaseFactoryImpl {
    /**
     * Creates the Xbase model object matching the classifier id of the given EClass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @throws IllegalArgumentException when the EClass is not an Xbase classifier
     */
    @Override
    public EObject create(EClass eClass) {
        // EMF-generated dispatch: one case per Xbase classifier id.
        switch (eClass.getClassifierID()) {
            case XbasePackage.XIF_EXPRESSION: return createXIfExpression();
            case XbasePackage.XSWITCH_EXPRESSION: return createXSwitchExpression();
            case XbasePackage.XCASE_PART: return createXCasePart();
            case XbasePackage.XBLOCK_EXPRESSION: return createXBlockExpression();
            case XbasePackage.XVARIABLE_DECLARATION: return createXVariableDeclaration();
            case XbasePackage.XMEMBER_FEATURE_CALL: return createXMemberFeatureCall();
            case XbasePackage.XFEATURE_CALL: return createXFeatureCall();
            case XbasePackage.XCONSTRUCTOR_CALL: return createXConstructorCall();
            case XbasePackage.XBOOLEAN_LITERAL: return createXBooleanLiteral();
            case XbasePackage.XNULL_LITERAL: return createXNullLiteral();
            case XbasePackage.XNUMBER_LITERAL: return createXNumberLiteral();
            case XbasePackage.XSTRING_LITERAL: return createXStringLiteral();
            case XbasePackage.XLIST_LITERAL: return createXListLiteral();
            case XbasePackage.XSET_LITERAL: return createXSetLiteral();
            case XbasePackage.XCLOSURE: return createXClosure();
            case XbasePackage.XCASTED_EXPRESSION: return createXCastedExpression();
            case XbasePackage.XBINARY_OPERATION: return createXBinaryOperation();
            case XbasePackage.XUNARY_OPERATION: return createXUnaryOperation();
            case XbasePackage.XPOSTFIX_OPERATION: return createXPostfixOperation();
            case XbasePackage.XFOR_LOOP_EXPRESSION: return createXForLoopExpression();
            case XbasePackage.XBASIC_FOR_LOOP_EXPRESSION: return createXBasicForLoopExpression();
            case XbasePackage.XDO_WHILE_EXPRESSION: return createXDoWhileExpression();
            case XbasePackage.XWHILE_EXPRESSION: return createXWhileExpression();
            case XbasePackage.XTYPE_LITERAL: return createXTypeLiteral();
            case XbasePackage.XINSTANCE_OF_EXPRESSION: return createXInstanceOfExpression();
            case XbasePackage.XTHROW_EXPRESSION: return createXThrowExpression();
            case XbasePackage.XTRY_CATCH_FINALLY_EXPRESSION: return createXTryCatchFinallyExpression();
            case XbasePackage.XCATCH_CLAUSE: return createXCatchClause();
            case XbasePackage.XASSIGNMENT: return createXAssignment();
            case XbasePackage.XRETURN_EXPRESSION: return createXReturnExpression();
            case XbasePackage.XSYNCHRONIZED_EXPRESSION: return createXSynchronizedExpression();
            default: throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
        }
    }
}
public class UserTagHandler {
    /**
     * Iterate over all TagAttributes and set them on the FaceletContext's VariableMapper, then include the target
     * Facelet. Finally, replace the old VariableMapper.
     * @see TagAttribute#getValueExpression(FaceletContext, Class)
     * @see javax.el.VariableMapper
     * @see javax.faces.view.facelets.FaceletHandler#apply(javax.faces.view.facelets.FaceletContext,
     *      javax.faces.component.UIComponent)
     */
    public void apply(FaceletContext ctx, UIComponent parent)
            throws IOException, FacesException, FaceletException, ELException {
        AbstractFaceletContext actx = (AbstractFaceletContext) ctx;
        // eval include
        try {
            // Resolve every tag attribute to a (name, ValueExpression) pair up front.
            String[] names = null;
            ValueExpression[] values = null;
            if (this._vars.length > 0) {
                names = new String[_vars.length];
                values = new ValueExpression[_vars.length];
                for (int i = 0; i < _vars.length; i++) {
                    names[i] = _vars[i].getLocalName();
                    values[i] = _vars[i].getValueExpression(ctx, Object.class);
                }
            }
            // Push a fresh template context/client pair; both are popped in the outer finally.
            actx.pushTemplateContext(new TemplateContextImpl());
            actx.pushClient(this);
            FaceletCompositionContext fcc = FaceletCompositionContext.getCurrentInstance(ctx);
            String uniqueId = fcc.startComponentUniqueIdSection();
            try {
                if (this._vars.length > 0) {
                    if (ELExpressionCacheMode.alwaysRecompile.equals(actx.getELExpressionCacheMode())) {
                        // alwaysRecompile: bind values into the FaceletState and expose
                        // indirection expressions so lookups re-read current state.
                        FaceletState faceletState = ComponentSupport.getFaceletState(ctx, parent, true);
                        for (int i = 0; i < this._vars.length; i++) {
                            faceletState.putBinding(uniqueId, names[i], values[i]);
                            ValueExpression ve;
                            if (ExternalSpecifications.isUnifiedELAvailable()) {
                                ve = new FaceletStateValueExpressionUEL(uniqueId, names[i]);
                            } else {
                                ve = new FaceletStateValueExpression(uniqueId, names[i]);
                            }
                            actx.getTemplateContext().setParameter(names[i], ve);
                        }
                    } else {
                        // Other modes: expose the resolved expressions directly.
                        for (int i = 0; i < this._vars.length; i++) {
                            ((AbstractFaceletContext) ctx).getTemplateContext().setParameter(names[i], values[i]);
                        }
                    }
                }
                // Disable caching always, even in 'always' mode.
                // The only mode that can support EL caching in this condition is alwaysRecompile.
                if (!ELExpressionCacheMode.alwaysRecompile.equals(actx.getELExpressionCacheMode())) {
                    actx.getTemplateContext().setAllowCacheELExpressions(false);
                }
                ctx.includeFacelet(parent, this._location);
            } finally {
                fcc.endComponentUniqueIdSection();
            }
        } catch (FileNotFoundException e) {
            throw new TagException(this.tag, e.getMessage());
        } finally {
            // make sure we undo our changes
            actx.popClient(this);
            actx.popTemplateContext();
        }
    }
}
public class StencilOperands {
    /**
     * Coerce a value to a collection.
     * <ul>
     *   <li>null            -&gt; an empty list</li>
     *   <li>a Collection    -&gt; returned as-is</li>
     *   <li>any array       -&gt; a mutable list of its elements (primitive arrays included)</li>
     *   <li>a Map           -&gt; its entry set</li>
     *   <li>anything else   -&gt; a mutable single-element list</li>
     * </ul>
     * @param val Object to be coerced.
     * @return The Collection coerced value.
     */
    public Collection<?> toCollection(Object val) {
        if (val == null) {
            return Collections.emptyList();
        } else if (val instanceof Collection<?>) {
            return (Collection<?>) val;
        } else if (val.getClass().isArray()) {
            // Use reflection so primitive arrays (int[], double[], ...) work too;
            // a direct (Object[]) cast would throw ClassCastException for them.
            final int length = java.lang.reflect.Array.getLength(val);
            final java.util.List<Object> items = new java.util.ArrayList<Object>(length);
            for (int i = 0; i < length; i++) {
                items.add(java.lang.reflect.Array.get(val, i));
            }
            return items;
        } else if (val instanceof Map<?, ?>) {
            return ((Map<?, ?>) val).entrySet();
        } else {
            return new java.util.ArrayList<Object>(java.util.Collections.singletonList(val));
        }
    }
}
public class DeploymentMarshaller {
    /**
     * Marshall the given parameter object, writing each Deployment field to the
     * protocol marshaller under its field binding.
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(Deployment deployment, ProtocolMarshaller protocolMarshaller) {
        if (deployment == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per field; null field values are handled by the marshaller.
            protocolMarshaller.marshall(deployment.getId(), ID_BINDING);
            protocolMarshaller.marshall(deployment.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(deployment.getTaskDefinition(), TASKDEFINITION_BINDING);
            protocolMarshaller.marshall(deployment.getDesiredCount(), DESIREDCOUNT_BINDING);
            protocolMarshaller.marshall(deployment.getPendingCount(), PENDINGCOUNT_BINDING);
            protocolMarshaller.marshall(deployment.getRunningCount(), RUNNINGCOUNT_BINDING);
            protocolMarshaller.marshall(deployment.getCreatedAt(), CREATEDAT_BINDING);
            protocolMarshaller.marshall(deployment.getUpdatedAt(), UPDATEDAT_BINDING);
            protocolMarshaller.marshall(deployment.getLaunchType(), LAUNCHTYPE_BINDING);
            protocolMarshaller.marshall(deployment.getPlatformVersion(), PLATFORMVERSION_BINDING);
            protocolMarshaller.marshall(deployment.getNetworkConfiguration(), NETWORKCONFIGURATION_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Timestamp { /** * Returns a Timestamp , precise to the second , with a given local offset . * This is equivalent to the corresponding Ion value * { @ code YYYY - MM - DDThh : mm : ss . sss + - oo : oo } , where { @ code oo : oo } represents * the hour and minutes of the local offset from UTC . * @ param second must be at least zero and less than 60. * Must not be null . * @ param offset * the local offset from UTC , measured in minutes ; * may be { @ code null } to represent an unknown local offset */ public static Timestamp forSecond ( int year , int month , int day , int hour , int minute , BigDecimal second , Integer offset ) { } }
// Tease apart the whole and fractional seconds . // Storing them separately is silly . int s = second . intValue ( ) ; BigDecimal frac = second . subtract ( BigDecimal . valueOf ( s ) ) ; return new Timestamp ( Precision . SECOND , year , month , day , hour , minute , s , frac , offset , APPLY_OFFSET_YES ) ;
public class SegmentIndexBuffer {
    /**
     * Reads from the specified <code>channel</code> into this SegmentIndexBuffer.
     * Layout: header (segment id, last-forced time, entry count), then the index
     * data, then a 16-byte MD5 footer over header+data. The buffer state is only
     * updated when the digest matches; on mismatch the record is silently skipped.
     * @param channel - the readable channel
     * @return the number of bytes read from the specified channel.
     * @throws IOException
     */
    public int read(ReadableByteChannel channel) throws IOException {
        // Header: segId(4), lastForcedTime(8), size(4)
        ByteBuffer header = ByteBuffer.allocate(HEADER_LENGTH);
        read(channel, header, HEADER_LENGTH, "Invalid Header");
        int segmentId = header.getInt(0);
        long lastForcedTime = header.getLong(4);
        int size = header.getInt(12);
        // Data: `size` entries of 8 bytes each (size << 3).
        int dataLength = size << 3;
        ByteBuffer data = ByteBuffer.allocate(dataLength);
        read(channel, data, dataLength, "Invalid Data");
        // Footer - MD5Digest(16)
        ByteBuffer md5 = ByteBuffer.allocate(MD5_LENGTH);
        read(channel, md5, MD5_LENGTH, "Invalid MD5");
        try {
            MessageDigest m = MessageDigest.getInstance("MD5");
            m.reset();
            m.update(header.array());
            m.update(data.array());
            byte[] digest = ensure128BitMD5(m.digest());
            // Apply the record only when the stored digest verifies.
            if (Arrays.equals(md5.array(), digest)) {
                setSegmentId(segmentId);
                setSegmentLastForcedTime(lastForcedTime);
                data.flip();
                put(data);
            }
        } catch (NoSuchAlgorithmException e) {
            throw new IOException(e);
        }
        // Total consumed: data plus the fixed header+footer overhead.
        return (dataLength + HEADER_FOOTER_LENGTH);
    }
}
public class CmsFlexCacheKey { /** * Returns resource name from given key name . < p > * @ param keyName given name of key . * @ return name of resource if key is valid , otherwise " " */ public static String getResourceName ( String keyName ) { } }
if ( keyName . endsWith ( CmsFlexCache . CACHE_OFFLINESUFFIX ) | keyName . endsWith ( CmsFlexCache . CACHE_ONLINESUFFIX ) ) { return keyName . split ( " " ) [ 0 ] ; } else { return "" ; }
public class CommandLineRunner { /** * < p > run . < / p > * @ param args a { @ link java . lang . String } object . * @ throws java . lang . Exception if any . */ public void run ( String ... args ) throws Exception { } }
List < String > parameters = parseCommandLine ( args ) ; if ( ! parameters . isEmpty ( ) ) { runClassicRunner ( parameters ) ; }
public class ReflectionUtils { /** * Returns the sum of the object transformation cost for each class in the source * argument list . * @ param srcArgs the source arguments * @ param destArgs the destination arguments * @ return the accumulated weight for all arguments */ public static float getTypeDifferenceWeight ( Class < ? > [ ] srcArgs , Class < ? > [ ] destArgs ) { } }
float weight = 0.0f ; for ( int i = 0 ; i < srcArgs . length ; i ++ ) { Class < ? > srcClass = srcArgs [ i ] ; Class < ? > destClass = destArgs [ i ] ; weight += getTypeDifferenceWeight ( srcClass , destClass ) ; if ( weight == Float . MAX_VALUE ) { break ; } } return weight ;
public class GreenMailBean { /** * Creates the server setup , depending on the protocol flags . * @ return the configured server setups . */ private ServerSetup [ ] createServerSetup ( ) { } }
List < ServerSetup > setups = new ArrayList < > ( ) ; if ( smtpProtocol ) { smtpServerSetup = createTestServerSetup ( ServerSetup . SMTP ) ; setups . add ( smtpServerSetup ) ; } if ( smtpsProtocol ) { smtpsServerSetup = createTestServerSetup ( ServerSetup . SMTPS ) ; setups . add ( smtpsServerSetup ) ; } if ( pop3Protocol ) { setups . add ( createTestServerSetup ( ServerSetup . POP3 ) ) ; } if ( pop3sProtocol ) { setups . add ( createTestServerSetup ( ServerSetup . POP3S ) ) ; } if ( imapProtocol ) { setups . add ( createTestServerSetup ( ServerSetup . IMAP ) ) ; } if ( imapsProtocol ) { setups . add ( createTestServerSetup ( ServerSetup . IMAPS ) ) ; } return setups . toArray ( new ServerSetup [ setups . size ( ) ] ) ;
public class MyProxy {
    /**
     * Retrieves credential information from MyProxy server.
     * Sends an info request over a confidentiality-protected GSI socket and
     * parses the line-oriented reply into CredentialInfo records — one default
     * credential plus one per named credential.
     * @param credential The local GSI credentials to use for authentication.
     * @param params     The parameters for the info operation.
     * @exception MyProxyException If an error occurred during the operation.
     * @return The array of credential information of all the user's credentials.
     */
    public CredentialInfo[] info(GSSCredential credential, InfoParams params) throws MyProxyException {
        if (credential == null) {
            throw new IllegalArgumentException("credential == null");
        }
        if (params == null) {
            throw new IllegalArgumentException("params == null");
        }
        String msg = params.makeRequest();
        CredentialInfo[] creds = null;
        Socket gsiSocket = null;
        OutputStream out = null;
        InputStream in = null;
        try {
            gsiSocket = getSocket(credential);
            out = gsiSocket.getOutputStream();
            in = gsiSocket.getInputStream();
            // The protocol requires an encrypted channel.
            if (!((GssSocket) gsiSocket).getContext().getConfState())
                throw new Exception("Confidentiality requested but not available");
            // send message
            out.write(msg.getBytes());
            out.flush();
            if (logger.isDebugEnabled()) {
                logger.debug("Req sent:" + params);
            }
            InputStream reply = handleReply(in);
            String line = null;
            String value = null;
            Map credMap = new HashMap(); // named credentials, keyed by name
            CredentialInfo info = new CredentialInfo(); // the default (unnamed) credential
            while ((line = readLine(reply)) != null) {
                // Unprefixed attributes describe the default credential...
                if (line.startsWith(CRED_START_TIME)) {
                    value = line.substring(CRED_START_TIME.length());
                    info.setStartTime(Long.parseLong(value) * 1000); // server sends seconds; store millis
                } else if (line.startsWith(CRED_END_TIME)) {
                    value = line.substring(CRED_END_TIME.length());
                    info.setEndTime(Long.parseLong(value) * 1000);
                } else if (line.startsWith(CRED_OWNER)) {
                    info.setOwner(line.substring(CRED_OWNER.length()));
                } else if (line.startsWith(CRED_NAME)) {
                    info.setName(line.substring(CRED_NAME.length()));
                } else if (line.startsWith(CRED_DESC)) {
                    info.setDescription(line.substring(CRED_DESC.length()));
                } else if (line.startsWith(CRED_RENEWER)) {
                    info.setRenewers(line.substring(CRED_RENEWER.length()));
                } else if (line.startsWith(CRED_RETRIEVER)) {
                    info.setRetrievers(line.substring(CRED_RETRIEVER.length()));
                } else if (line.startsWith(CRED)) {
                    // ...while CRED_<name>_<attr>=<value> lines describe named credentials.
                    int pos = line.indexOf('=', CRED.length());
                    if (pos == -1) {
                        continue;
                    }
                    value = line.substring(pos + 1);
                    if (matches(line, pos + 1, OWNER)) {
                        String name = getCredName(line, pos, OWNER);
                        getCredentialInfo(credMap, name).setOwner(value);
                    } else if (matches(line, pos + 1, START_TIME)) {
                        String name = getCredName(line, pos, START_TIME);
                        getCredentialInfo(credMap, name).setStartTime(Long.parseLong(value) * 1000);
                    } else if (matches(line, pos + 1, END_TIME)) {
                        String name = getCredName(line, pos, END_TIME);
                        getCredentialInfo(credMap, name).setEndTime(Long.parseLong(value) * 1000);
                    } else if (matches(line, pos + 1, DESC)) {
                        String name = getCredName(line, pos, DESC);
                        getCredentialInfo(credMap, name).setDescription(value);
                    } else if (matches(line, pos + 1, RENEWER)) {
                        String name = getCredName(line, pos, RENEWER);
                        getCredentialInfo(credMap, name).setRenewers(value);
                    } else if (matches(line, pos + 1, RETRIEVER)) {
                        String name = getCredName(line, pos, RETRIEVER);
                        getCredentialInfo(credMap, name).setRetrievers(value);
                    }
                }
            }
            creds = new CredentialInfo[1 + credMap.size()];
            creds[0] = info; // default creds at position 0
            if (credMap.size() > 0) {
                int i = 1;
                Iterator iter = credMap.entrySet().iterator();
                while (iter.hasNext()) {
                    Map.Entry entry = (Map.Entry) iter.next();
                    creds[i++] = (CredentialInfo) entry.getValue();
                }
            }
            return creds;
        } catch (Exception e) {
            throw new MyProxyException("MyProxy info failed.", e);
        } finally {
            // close socket
            close(out, in, gsiSocket);
        }
    }
}
public class Gram {
    /**
     * Completes a GSI delegation handshake with a globus job manager
     * that has agreed to a (previously sent) GRAM "renew" request. After
     * the job manager receives the last token in the handshake, it responds
     * with a message following the GRAM protocol indicating delegation success
     * or failure.
     * @param context Previously established context with job manager
     * @param newCred The credential used to generate a new delegated proxy
     * @param out     Stream used to send messages to job manager
     * @param in      Stream used to receive messages from job manager
     * @throws GSSException if an error occurs during token wrapping/unwrapping
     * @throws IOException  if a communication error occurs
     * @return the GRAM response message indicating delegation status
     */
    private static GatekeeperReply renewDelegationHandshake(ExtendedGSSContext context,
            GSSCredential newCred, GSIGssOutputStream out, GSIGssInputStream in)
            throws GSSException, IOException {
        byte[] input = new byte[0]; // first initDelegation round takes an empty token
        byte[] output = null;
        do {
            // Produce the next delegation token and send it; read the peer's
            // answer only while the handshake is still in progress.
            output = produceRenewToken(context,
                    context.initDelegation(newCred, null, 0, input, 0, input.length));
            out.writeToken(output);
            if (!context.isDelegationFinished()) {
                input = consumeRenewToken(context, in.readHandshakeToken());
            }
        } while (!context.isDelegationFinished());
        // After the final token the job manager sends the GRAM status reply.
        GatekeeperReply reply = new GatekeeperReply(in);
        return reply;
    }
}
public class AuditCollectorUtil { /** * Get code quality audit results */ private static Audit getCodeQualityAudit ( JSONArray jsonArray , JSONArray global ) { } }
LOGGER . info ( "NFRR Audit Collector auditing CODE_QUALITY" ) ; Audit audit = new Audit ( ) ; audit . setType ( AuditType . CODE_QUALITY ) ; Audit basicAudit ; if ( ( basicAudit = doBasicAuditCheck ( jsonArray , global , AuditType . CODE_QUALITY ) ) != null ) { return basicAudit ; } audit . setAuditStatus ( AuditStatus . OK ) ; audit . setDataStatus ( DataStatus . OK ) ; for ( Object o : jsonArray ) { Optional < Object > urlOptObj = Optional . ofNullable ( ( ( JSONObject ) o ) . get ( STR_URL ) ) ; urlOptObj . ifPresent ( urlObj -> audit . getUrl ( ) . add ( urlOptObj . get ( ) . toString ( ) ) ) ; JSONArray auditJO = ( JSONArray ) ( ( JSONObject ) o ) . get ( STR_AUDITSTATUSES ) ; auditJO . stream ( ) . map ( aj -> audit . getAuditStatusCodes ( ) . add ( ( String ) aj ) ) ; boolean ok = false ; for ( Object s : auditJO ) { String status = ( String ) s ; audit . getAuditStatusCodes ( ) . add ( status ) ; if ( CodeQualityAuditStatus . CODE_QUALITY_AUDIT_OK . name ( ) . equalsIgnoreCase ( status ) ) { ok = true ; break ; } if ( CodeQualityAuditStatus . CODE_QUALITY_DETAIL_MISSING . name ( ) . equalsIgnoreCase ( status ) ) { audit . setAuditStatus ( AuditStatus . NA ) ; audit . setDataStatus ( DataStatus . NO_DATA ) ; return audit ; } } if ( ! ok ) { audit . setAuditStatus ( AuditStatus . FAIL ) ; return audit ; } } return audit ;
public class Requests { /** * Create a new { @ link PublishDelete } instance for a link between two * { @ link Identifier } instances in order to delete its metadata that matches * the given filter . * @ param i1 the first { @ link Identifier } of the link * @ param i2 the second { @ link Identifier } of the link * @ param filter a filter that expresses the metadata that shall be deleted * @ return the new { @ link PublishDelete } instance */ public static PublishDelete createPublishDelete ( Identifier i1 , Identifier i2 , String filter ) { } }
PublishDelete pd = createPublishDelete ( ) ; fillIdentifierHolder ( pd , i1 , i2 ) ; pd . setFilter ( filter ) ; return pd ;
public class ClassUtils {
    /**
     * Sets a field on the given object, converting the String value to the
     * field's primitive type (int, long, boolean or double). An empty String
     * maps to that primitive's zero/false default; any other field type
     * receives the raw String value unchanged.
     * @param field the field to assign
     * @param obj   the object owning the field
     * @param value the String representation of the new value
     * @throws Exception if parsing or the reflective assignment fails
     */
    public static void setFieldValeByType(Field field, Object obj, String value) throws Exception {
        final String typeName = field.getType().getName();
        switch (typeName) {
            case "int":
                field.set(obj, Integer.valueOf(value.equals("") ? "0" : value));
                break;
            case "long":
                field.set(obj, Long.valueOf(value.equals("") ? "0" : value));
                break;
            case "boolean":
                field.set(obj, Boolean.valueOf(value.equals("") ? "false" : value));
                break;
            case "double":
                field.set(obj, Double.valueOf(value.equals("") ? "0.0" : value));
                break;
            default:
                field.set(obj, value);
        }
    }
}
public class ApiClient { /** * { @ link # execute ( Call , Type ) } * @ param < T > Type * @ param call An instance of the Call object * @ throws ApiException If fail to execute the call * @ return ApiResponse & lt ; T & gt ; */ public < T > ApiResponse < T > execute ( Call call ) throws ApiException { } }
return execute ( call , null ) ;
public class MDRRGImpl {
    /**
     * Resets the given feature to its default: RG length back to its EMF default,
     * triplets cleared; all other features are handled by the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AfplibPackage.MDRRG__RG_LENGTH:
                setRGLength(RG_LENGTH_EDEFAULT);
                return;
            case AfplibPackage.MDRRG__TRIPLETS:
                getTriplets().clear();
                return;
        }
        super.eUnset(featureID);
    }
}
public class OperationsInner { /** * Get available resource provider actions ( operations ) . * Lists all available actions exposed by the Data Migration Service resource provider . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; ServiceOperationInner & gt ; object */ public Observable < Page < ServiceOperationInner > > listAsync ( ) { } }
return listWithServiceResponseAsync ( ) . map ( new Func1 < ServiceResponse < Page < ServiceOperationInner > > , Page < ServiceOperationInner > > ( ) { @ Override public Page < ServiceOperationInner > call ( ServiceResponse < Page < ServiceOperationInner > > response ) { return response . body ( ) ; } } ) ;
public class SparkComputationGraph { /** * Score the examples individually , using the default batch size { @ link # DEFAULT _ EVAL _ SCORE _ BATCH _ SIZE } . Unlike { @ link # calculateScore ( JavaRDD , boolean ) } , * this method returns a score for each example separately . If scoring is needed for specific examples use either * { @ link # scoreExamples ( JavaPairRDD , boolean ) } or { @ link # scoreExamples ( JavaPairRDD , boolean , int ) } which can have * a key for each example . * @ param data Data to score * @ param includeRegularizationTerms If true : include the l1 / l2 regularization terms with the score ( if any ) * @ return A JavaDoubleRDD containing the scores of each example * @ see ComputationGraph # scoreExamples ( MultiDataSet , boolean ) */ public JavaDoubleRDD scoreExamplesMultiDataSet ( JavaRDD < MultiDataSet > data , boolean includeRegularizationTerms ) { } }
return scoreExamplesMultiDataSet ( data , includeRegularizationTerms , DEFAULT_EVAL_SCORE_BATCH_SIZE ) ;
public class DefaultResourceCache { /** * ( non - Javadoc ) * @ see * org . javamoney . moneta . loader . format . ResourceCache # write ( java . lang . String * , byte [ ] ) */ @ Override public void write ( String resourceId , byte [ ] data ) { } }
try { File file = this . cachedResources . get ( resourceId ) ; if ( Objects . isNull ( file ) ) { file = new File ( localDir , resourceId + SUFFIX ) ; Files . write ( file . toPath ( ) , data ) ; this . cachedResources . put ( resourceId , file ) ; } else { Files . write ( file . toPath ( ) , data ) ; } } catch ( Exception e ) { LOG . log ( Level . WARNING , "Caching of resource failed: " + resourceId , e ) ; }
public class Issue {
    /**
     * Set up the screen input fields.
     * Declares every column of the issue record: hidden bookkeeping fields
     * first, then the user-visible data and foreign-key fields.
     */
    public void setupFields() {
        FieldInfo field = null;
        // --- hidden bookkeeping fields ---
        field = new FieldInfo(this, ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field.setHidden(true);
        field = new FieldInfo(this, LAST_CHANGED, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Date.class);
        field.setHidden(true);
        field = new FieldInfo(this, DELETED, 10, null, new Boolean(false)); // soft-delete flag, defaults to false
        field.setDataClass(Boolean.class);
        field.setHidden(true);
        // --- user-visible data ---
        field = new FieldInfo(this, DESCRIPTION, 120, null, null); // free-text field, no data class override
        // --- foreign keys and attributes ---
        field = new FieldInfo(this, PROJECT_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field = new FieldInfo(this, PROJECT_VERSION_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field = new FieldInfo(this, ISSUE_TYPE_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field = new FieldInfo(this, ISSUE_STATUS_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field = new FieldInfo(this, ASSIGNED_USER_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field = new FieldInfo(this, ISSUE_PRIORITY_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field = new FieldInfo(this, ISSUE_SEQUENCE, 10, null, null);
        field.setDataClass(Integer.class);
        // --- audit trail ---
        field = new FieldInfo(this, ENTERED_DATE, 25, null, null);
        field.setDataClass(Date.class);
        field = new FieldInfo(this, ENTERED_BY_USER_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field = new FieldInfo(this, CHANGED_DATE, 25, null, null);
        field.setDataClass(Date.class);
        field = new FieldInfo(this, CHANGED_BY_USER_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
        field = new FieldInfo(this, CLASS_INFO_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        field.setDataClass(Integer.class);
    }
}
public class BasicApiConnection {
    /**
     * To be migrated from {@class ApiConnection}.
     * Pure delegation to the superclass implementation; kept as an override
     * point during the migration.
     */
    @Override
    void confirmLogin(String token, String username, String password)
            throws IOException, LoginFailedException, MediaWikiApiErrorException {
        super.confirmLogin(token, username, password);
    }
}
public class MultipleLoaderClassResolver {
    /**
     * Loads class from multiple ClassLoader.
     * If this method can not load target class, it tries to add package
     * java.lang (default package) and load target class. Still, if it can not
     * load the class, throws ClassNotFoundException. (behavior is put together
     * on DefaultClassResolver.)
     * @param className class name -- that wants to load it.
     * @param loaderMap map -- that has VALUES ClassLoader (KEYS are arbitrary).
     * @return loaded class from ClassLoader defined loaderMap.
     * @throws ClassNotFoundException when no loader can resolve the name
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public Class classForName(String className, Map loaderMap) throws ClassNotFoundException {
        // Candidate loaders: context class loader first, then any from the map.
        Collection<ClassLoader> loaders = new HashSet<ClassLoader>();
        ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
        if (contextLoader != null) {
            // getContextClassLoader() may return null; the original added it
            // unconditionally and would NPE on loadClass instead of throwing CNFE.
            loaders.add(contextLoader);
        }
        if (loaderMap != null && !loaderMap.isEmpty()) {
            loaders.addAll(loaderMap.values());
        }
        ClassNotFoundException lastCause = null;
        for (ClassLoader loader : loaders) {
            if (loader == null) {
                continue; // null entries from the map would also NPE
            }
            try {
                return loader.loadClass(className);
            } catch (ClassNotFoundException fqcnException) {
                lastCause = fqcnException;
                // Unqualified name: retry under the default java.lang package.
                if (className.indexOf('.') == -1) {
                    try {
                        return loader.loadClass("java.lang." + className);
                    } catch (ClassNotFoundException defaultClassException) {
                        lastCause = defaultClassException;
                        // try next loader.
                    }
                }
            }
        }
        // can't load class at end.
        if (lastCause == null) {
            // no usable loader at all — still honor the declared contract
            lastCause = new ClassNotFoundException(className);
        }
        throw lastCause;
    }
}
public class SettingsPack {

    /**
     * Starts the dht node and makes the trackerless service available to
     * torrents.
     *
     * @param value true to enable the DHT node, false to disable it
     * @return this settings pack, for call chaining
     */
    public SettingsPack enableDht(boolean value) {
        // Writes the native libtorrent enable_dht boolean setting via SWIG.
        sp.set_bool(settings_pack.bool_types.enable_dht.swigValue(), value);
        return this;
    }
}
public class MergePolicyValidator { /** * Checks the merge policy configuration in the context of an { @ link ICache } . * @ param mergePolicyClassname the configured merge policy of the cache * @ param mergeTypeProvider the { @ link SplitBrainMergeTypeProvider } of the cache * @ param mergePolicyProvider the { @ link CacheMergePolicyProvider } to resolve merge policy classes */ static void checkCacheMergePolicy ( String mergePolicyClassname , SplitBrainMergeTypeProvider mergeTypeProvider , CacheMergePolicyProvider mergePolicyProvider ) { } }
if ( mergePolicyProvider == null ) { return ; } Object mergePolicyInstance = getMergePolicyInstance ( mergePolicyProvider , mergePolicyClassname ) ; checkMergePolicy ( mergeTypeProvider , mergePolicyInstance ) ;
public class MediaApi {

    /**
     * Create open media interaction.
     * Create a new open media interaction by building the validated HTTP call
     * and executing it synchronously.
     *
     * @param mediatype The media channel. (required)
     * @param createData Request parameters. (required)
     * @return ApiResponse&lt;ApiSuccessResponse&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<ApiSuccessResponse> createOpenMediaWithHttpInfo(String mediatype, CreateData1 createData) throws ApiException {
        // Validation of required parameters happens inside the *ValidateBeforeCall helper.
        com.squareup.okhttp.Call call = createOpenMediaValidateBeforeCall(mediatype, createData, null, null);
        Type localVarReturnType = new TypeToken<ApiSuccessResponse>() { }.getType();
        return apiClient.execute(call, localVarReturnType);
    }
}
public class PatternsImpl {

    /**
     * Returns an application version's patterns.
     * Blocking wrapper around the async service call: waits for the single
     * response and unwraps its body.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param getPatternsOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;PatternRuleInfo&gt; object if successful.
     */
    public List<PatternRuleInfo> getPatterns(UUID appId, String versionId, GetPatternsOptionalParameter getPatternsOptionalParameter) {
        return getPatternsWithServiceResponseAsync(appId, versionId, getPatternsOptionalParameter).toBlocking().single().body();
    }
}
public class Promise { /** * see IPromise ( inheriting Callback ) interface */ @ Override public IPromise < T > thenAnd ( Supplier < IPromise < T > > callable ) { } }
Promise res = new Promise < > ( ) ; then ( new Callback < T > ( ) { @ Override public void complete ( T result , Object error ) { if ( Actor . isError ( error ) ) { res . complete ( null , error ) ; } else { IPromise < T > call = null ; call = callable . get ( ) . then ( res ) ; } } } ) ; return res ;
public class OpenIabHelper {

    /**
     * Queries the inventory. This will query all owned items from the server, as well as
     * information on additional skus, if specified. This method may block or take long to execute.
     * Do not call from the UI thread. For that, use the non-blocking version {@link #queryInventoryAsync(boolean, java.util.List, java.util.List, org.onepf.oms.appstore.googleUtils.IabHelper.QueryInventoryFinishedListener)}.
     *
     * @param querySkuDetails if true, SKU details (price, description, etc) will be queried as well
     *                        as purchase information.
     * @param moreItemSkus additional PRODUCT skus to query information on, regardless of ownership.
     *                     Ignored if null or if querySkuDetails is false.
     * @param moreSubsSkus additional SUBSCRIPTIONS skus to query information on, regardless of ownership.
     *                     Ignored if null or if querySkuDetails is false.
     * @return the queried inventory, or null if setup has not completed successfully
     * @throws IabException if a problem occurs while refreshing the inventory.
     */
    @Nullable
    public Inventory queryInventory(final boolean querySkuDetails,
                                    @Nullable final List<String> moreItemSkus,
                                    @Nullable final List<String> moreSubsSkus) throws IabException {
        if (Utils.uiThread()) {
            throw new IllegalStateException("Must not be called from the UI thread");
        }
        // Snapshot the mutable fields so a concurrent teardown cannot null them mid-call.
        final Appstore appstore = this.appstore;
        final AppstoreInAppBillingService appStoreBillingService = this.appStoreBillingService;
        if (setupState != SETUP_RESULT_SUCCESSFUL || appstore == null || appStoreBillingService == null) {
            return null;
        }
        // Map the caller-visible product SKUs to the store-specific SKUs of the active appstore.
        final List<String> moreItemStoreSkus;
        final SkuManager skuManager = SkuManager.getInstance();
        if (moreItemSkus != null) {
            moreItemStoreSkus = new ArrayList<String>(moreItemSkus.size());
            for (String sku : moreItemSkus) {
                moreItemStoreSkus.add(skuManager.getStoreSku(appstore.getAppstoreName(), sku));
            }
        } else {
            moreItemStoreSkus = null;
        }
        // Same mapping for subscription SKUs.
        final List<String> moreSubsStoreSkus;
        if (moreSubsSkus != null) {
            moreSubsStoreSkus = new ArrayList<String>(moreSubsSkus.size());
            for (String sku : moreSubsSkus) {
                moreSubsStoreSkus.add(skuManager.getStoreSku(appstore.getAppstoreName(), sku));
            }
        } else {
            moreSubsStoreSkus = null;
        }
        // Delegate the actual (blocking) query to the store-specific billing service.
        return appStoreBillingService.queryInventory(querySkuDetails, moreItemStoreSkus, moreSubsStoreSkus);
    }
}
public class IcsAbsSpinner { /** * Jump directly to a specific item in the adapter data . */ public void setSelection ( int position , boolean animate ) { } }
// Animate only if requested position is already on screen somewhere boolean shouldAnimate = animate && mFirstPosition <= position && position <= mFirstPosition + getChildCount ( ) - 1 ; setSelectionInt ( position , shouldAnimate ) ;
public class FunctionParamBuilder { /** * Add optional parameters of the given type to the end of the param list . * @ param types Types for each optional parameter . The builder will make them * undefine - able . * @ return False if this is called after var args are added . */ public boolean addOptionalParams ( JSType ... types ) { } }
if ( hasVarArgs ( ) ) { return false ; } for ( JSType type : types ) { newParameter ( registry . createOptionalType ( type ) ) . setOptionalArg ( true ) ; } return true ;
public class SDVariable {

    /**
     * See {@link #add(String, SDVariable)}.
     * Convenience overload that generates a fresh variable name for the result.
     *
     * @param other the variable to add to this one
     * @return the result variable of the addition
     */
    public SDVariable add(SDVariable other) {
        return add(sameDiff.generateNewVarName(AddOp.OP_NAME, 0), other);
    }
}
public class TransliteratorRegistry { /** * Remove a source - target / variant from the specDAG . */ private void removeSTV ( String source , String target , String variant ) { } }
// assert ( source . length ( ) > 0 ) ; // assert ( target . length ( ) > 0 ) ; CaseInsensitiveString cisrc = new CaseInsensitiveString ( source ) ; CaseInsensitiveString citrg = new CaseInsensitiveString ( target ) ; CaseInsensitiveString civar = new CaseInsensitiveString ( variant ) ; Map < CaseInsensitiveString , List < CaseInsensitiveString > > targets = specDAG . get ( cisrc ) ; if ( targets == null ) { return ; // should never happen for valid s - t / v } List < CaseInsensitiveString > variants = targets . get ( citrg ) ; if ( variants == null ) { return ; // should never happen for valid s - t / v } variants . remove ( civar ) ; if ( variants . size ( ) == 0 ) { targets . remove ( citrg ) ; // should delete variants if ( targets . size ( ) == 0 ) { specDAG . remove ( cisrc ) ; // should delete targets } }
public class FSDirectory {

    /**
     * Add node to parent node when loading the image.
     * Runs under the directory write lock; also registers the blocks of a file
     * node in the blocks map. NOTE: This does not update space counts for parents.
     *
     * @param src path bytes of the node being added
     * @param parentINode the directory to attach the node to
     * @param newNode the node to add
     * @param propagateModTime whether to propagate the modification time upward
     * @param childIndex position hint for insertion into the parent
     * @return the parent directory on success, or null if the parent path was
     *         not found or the add failed
     */
    INodeDirectory addToParent(byte[] src, INodeDirectory parentINode, INode newNode,
                               boolean propagateModTime, int childIndex) {
        // NOTE: This does not update space counts for parents
        // add new node to the parent
        INodeDirectory newParent = null;
        writeLock();
        try {
            try {
                newParent = rootDir.addToParent(src, newNode, parentINode, false, propagateModTime, childIndex);
                cacheName(newNode);
            } catch (FileNotFoundException e) {
                // Parent path missing: signal failure to the image loader.
                return null;
            }
            if (newParent == null)
                return null;
            if (!newNode.isDirectory()) {
                // Add block -> file mapping so lookups by block resolve to this file.
                INodeFile newF = (INodeFile) newNode;
                BlockInfo[] blocks = newF.getBlocks();
                for (int i = 0; i < blocks.length; i++) {
                    newF.setBlock(i, getFSNamesystem().blocksMap.addINodeForLoading(blocks[i], newF));
                }
            }
        } finally {
            writeUnlock();
        }
        return newParent;
    }
}
public class JsonSerializer {

    /**
     * Serializes an object into JSON output using the default serializer
     * parameters.
     *
     * @param writer {@link JsonWriter} used to write the serialized JSON
     * @param value Object to serialize
     * @param ctx Context for the full serialization process
     * @throws JsonSerializationException if an error occurs during the serialization
     */
    public void serialize(JsonWriter writer, T value, JsonSerializationContext ctx) throws JsonSerializationException {
        serialize(writer, value, ctx, JsonSerializerParameters.DEFAULT);
    }
}
public class Spinner {

    /**
     * Set an adapter for this Spinner.
     * Unregisters the observer from any previous adapter, clears recycled views,
     * registers the observer on the new adapter and refreshes the widget. The
     * drop-down adapter is either applied to the popup immediately or stashed
     * until the popup exists.
     * NOTE(review): a null adapter would NPE at registerDataSetObserver —
     * presumably callers always pass a non-null adapter; confirm before relying on it.
     *
     * @param adapter the new adapter supplying the spinner's items
     */
    public void setAdapter(SpinnerAdapter adapter) {
        if (mAdapter != null)
            mAdapter.unregisterDataSetObserver(mDataSetObserver);
        mRecycler.clear();
        mAdapter = adapter;
        mAdapter.registerDataSetObserver(mDataSetObserver);
        onDataChanged();
        if (mPopup != null)
            mPopup.setAdapter(new DropDownAdapter(adapter));
        else
            mTempAdapter = new DropDownAdapter(adapter);
    }
}
public class RecordReaderConverter { /** * Write all values from the specified record reader to the specified record writer . * Optionally , close the record writer on completion * @ param reader Record reader ( source of data ) * @ param writer Record writer ( location to write data ) * @ param closeOnCompletion if true : close the record writer once complete , via { @ link RecordWriter # close ( ) } * @ throws IOException If underlying reader / writer throws an exception */ public static void convert ( RecordReader reader , RecordWriter writer , boolean closeOnCompletion ) throws IOException { } }
if ( ! reader . hasNext ( ) ) { throw new UnsupportedOperationException ( "Cannot convert RecordReader: reader has no next element" ) ; } while ( reader . hasNext ( ) ) { writer . write ( reader . next ( ) ) ; } if ( closeOnCompletion ) { writer . close ( ) ; }
public class MutableRoaringArray { /** * Append copies of the values AFTER a specified key ( may or may not be present ) to end . * @ param highLowContainer the other array * @ param beforeStart given key is the largest key that we won ' t copy */ protected void appendCopiesAfter ( PointableRoaringArray highLowContainer , short beforeStart ) { } }
int startLocation = highLowContainer . getIndex ( beforeStart ) ; if ( startLocation >= 0 ) { startLocation ++ ; } else { startLocation = - startLocation - 1 ; } extendArray ( highLowContainer . size ( ) - startLocation ) ; for ( int i = startLocation ; i < highLowContainer . size ( ) ; ++ i ) { this . keys [ this . size ] = highLowContainer . getKeyAtIndex ( i ) ; this . values [ this . size ] = highLowContainer . getContainerAtIndex ( i ) . clone ( ) ; this . size ++ ; }
public class HadoopPath {

    /**
     * TBD: performance, avoid initOffsets.
     * Normalizes this path's byte representation in a single pass: collapses
     * "." components, resolves ".." against previously emitted components
     * (tracked in lastM), and strips a trailing '/'. Leading ".." components
     * of a relative path are preserved.
     *
     * @return the normalized path bytes (may be the original-length buffer or a
     *         trimmed copy)
     */
    private byte[] resolve0() {
        byte[] to = new byte[path.length];
        int nc = getNameCount();
        // Stack of output offsets where each emitted component starts, used to
        // "retreat" when a ".." is seen.
        int[] lastM = new int[nc];
        int lastMOff = -1;
        int m = 0; // write position in `to`
        for (int i = 0; i < nc; i++) {
            int n = offsets[i]; // start of the i-th name in `path`
            // Length of this component (last component runs to end of path).
            int len = (i == offsets.length - 1) ? (path.length - n) : (offsets[i + 1] - n - 1);
            if (len == 1 && path[n] == (byte) '.') {
                // "." component: emit only the leading '/' of an absolute path.
                if (m == 0 && path[0] == '/') // absolute path
                    to[m++] = '/';
                continue;
            }
            if (len == 2 && path[n] == '.' && path[n + 1] == '.') {
                if (lastMOff >= 0) {
                    // Drop the previously emitted component.
                    m = lastM[lastMOff--]; // retreat
                    continue;
                }
                if (path[0] == '/') { // "/../xyz" skip
                    // ".." at the root is a no-op; keep the leading '/'.
                    if (m == 0)
                        to[m++] = '/';
                } else { // "../xyz" -> "../xyz"
                    // Relative path: a leading ".." must be preserved verbatim.
                    if (m != 0 && to[m - 1] != '/')
                        to[m++] = '/';
                    while (len-- > 0)
                        to[m++] = path[n++];
                }
                continue;
            }
            // Ordinary component: emit a separator when needed, record its start,
            // then copy its bytes.
            if (m == 0 && path[0] == '/' || // absolute path
                m != 0 && to[m - 1] != '/') { // not the first name
                to[m++] = '/';
            }
            lastM[++lastMOff] = m;
            while (len-- > 0)
                to[m++] = path[n++];
        }
        // Strip a trailing '/' (but never reduce "/" itself to empty).
        if (m > 1 && to[m - 1] == '/')
            m--;
        return (m == to.length) ? to : Arrays.copyOf(to, m);
    }
}
public class TimeArrayTimeZoneRule { /** * / * Get UTC of the time with the raw / dst offset */ private long getUTC ( long time , int raw , int dst ) { } }
if ( timeType != DateTimeRule . UTC_TIME ) { time -= raw ; } if ( timeType == DateTimeRule . WALL_TIME ) { time -= dst ; } return time ;
public class DurableOutputHandler {

    /**
     * Create a CardinalityInfo reply.
     * Builds a new control message, initializes its routing fields for the
     * given target, and stamps the request ID and cardinality. Creation
     * failures are FFDC'd and traced; in that case null is returned.
     *
     * @param MP the message processor supplying factories and the local ME uuid
     * @param target The target ME for this reply.
     * @param reqID The request ID of the original request message.
     * @param card The cardinality to record on this message.
     * @return the populated control message, or null if creation failed
     */
    protected static ControlCardinalityInfo createCardinalityInfo(MessageProcessor MP, SIBUuid8 target, long reqID, int card) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createCardinalityInfo", new Object[] { MP, target, new Long(reqID), new Integer(card) });
        ControlCardinalityInfo msg = null;
        try {
            // Create and initialize the message
            msg = MessageProcessor.getControlMessageFactory().createNewControlCardinalityInfo();
            initializeControlMessage(MP.getMessagingEngineUuid(), msg, target);
            // Parameterize for CreateStream
            msg.setRequestID(reqID);
            msg.setCardinality(card);
        } catch (Exception e) {
            // Best-effort: record the failure and fall through returning null.
            FFDCFilter.processException(e,
                    "com.ibm.ws.sib.processor.impl.DurableOutputHandler.createCardinalityInfo",
                    "1:552:1.45.1.1",
                    DurableOutputHandler.class);
            SibTr.exception(tc, e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createCardinalityInfo", msg);
        return msg;
    }
}
public class InstanceRequestMap {

    /**
     * Sets both the minimum and the maximum number of instances to be requested
     * from the given instance type, i.e. requests exactly {@code number}
     * instances.
     *
     * @param instanceType
     *        the type of instance to request
     * @param number
     *        the minimum and the maximum number of instances to request
     */
    public void setNumberOfInstances(final InstanceType instanceType, final int number) {
        setMinimumNumberOfInstances(instanceType, number);
        setMaximumNumberOfInstances(instanceType, number);
    }
}
public class PersistenceBrokerImpl {

    /**
     * Store all object references that <b>obj</b> points to.
     * All objects which we have a FK pointing to (via ReferenceDescriptors) will be
     * stored if auto-update is true <b>AND</b> the member field containing the object
     * reference is NOT null.
     * With flag <em>ignoreReferences</em> the storing/linking of references can be
     * suppressed (independent of the used auto-update setting), except
     * {@link org.apache.ojb.broker.metadata.SuperReferenceDescriptor} — these kind
     * of reference (descriptor) will always be performed.
     *
     * @param obj Object which we will store references for
     */
    private void storeReferences(Object obj, ClassDescriptor cld, boolean insert, boolean ignoreReferences) {
        Collection referenceDescriptors = cld.getObjectReferenceDescriptors();
        if (referenceDescriptors == null || referenceDescriptors.isEmpty()) {
            // Nothing to store for this class.
            return;
        }
        for (Iterator it = referenceDescriptors.iterator(); it.hasNext();) {
            ObjectReferenceDescriptor rds = (ObjectReferenceDescriptor) it.next();
            // Super-references (table-per-subclass inheritance) must always be
            // performed; ordinary 1:1 references honour the ignoreReferences flag
            // and the descriptor's cascading-store setting.
            boolean cascades = rds.getCascadingStore() != ObjectReferenceDescriptor.CASCADE_NONE;
            if ((!ignoreReferences && cascades) || rds.isSuperReferenceDescriptor()) {
                storeAndLinkOneToOne(false, obj, cld, rds, insert);
            }
        }
    }
}
public class SwaptionDataLattice {

    /**
     * Get a view of the locations of swaptions in this lattice.
     * The keys of the map are the levels of moneyness for which there are swaptions, sorted in ascending order.
     * The entries for each moneyness consist of an array of arrays {maturities, tenors}, each sorted in ascending order.
     * Note, there is no guarantee for the grid per moneyness to be regular.
     * Hence, getValue may still throw a NullPointerException, even when using entries from this view.
     * The result is computed lazily on first call and cached in the keyMap field.
     * NOTE(review): the lazy initialization is not synchronized — presumably this
     * object is confined to one thread or benign double-computation is acceptable;
     * confirm before concurrent use.
     *
     * @return The view of recorded swaptions.
     */
    public Map<Integer, int[][]> getGridNodesPerMoneyness() {
        // See if the map has already been instantiated.
        if (keyMap != null) {
            return Collections.unmodifiableMap(keyMap);
        }
        // Otherwise create the map and return it.
        // First gather, per moneyness, the distinct maturities (index 0) and tenors (index 1).
        Map<Integer, List<Set<Integer>>> newMap = new HashMap<>();
        for (DataKey key : entryMap.keySet()) {
            if (!newMap.containsKey(key.moneyness)) {
                newMap.put(key.moneyness, new ArrayList<Set<Integer>>());
                newMap.get(key.moneyness).add(new HashSet<Integer>());
                newMap.get(key.moneyness).add(new HashSet<Integer>());
            }
            newMap.get(key.moneyness).get(0).add(key.maturity);
            newMap.get(key.moneyness).get(1).add(key.tenor);
        }
        // Then convert each set pair to sorted int arrays, keyed in ascending
        // moneyness order (TreeMap). Local `keyMap` intentionally shadows the
        // field until fully built, then is published.
        Map<Integer, int[][]> keyMap = new TreeMap<>();
        for (int moneyness : newMap.keySet()) {
            int[][] values = new int[2][];
            values[0] = newMap.get(moneyness).get(0).stream().sorted().mapToInt(Integer::intValue).toArray();
            values[1] = newMap.get(moneyness).get(1).stream().sorted().mapToInt(Integer::intValue).toArray();
            keyMap.put(moneyness, values);
        }
        this.keyMap = keyMap;
        return Collections.unmodifiableMap(keyMap);
    }
}
public class Player {

    /**
     * Starts the thread of a robot in the player's thread group.
     * The robot's data is put into the default start state before the thread
     * (named after the robot's serial number) is launched.
     *
     * @param newRobot the robot to start
     */
    public void startRobot(Robot newRobot) {
        newRobot.getData().setActiveState(DEFAULT_START_STATE);
        Thread newThread = new Thread(robotsThreads, newRobot, "Bot-" + newRobot.getSerialNumber());
        newThread.start(); // jumpstarts the robot
    }
}
public class FlowEventService { /** * Stores a single flow event row * @ param event * @ throws IOException */ public void addEvent ( FlowEvent event ) throws IOException { } }
Put p = createPutForEvent ( event ) ; Table eventTable = null ; try { eventTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . FLOW_EVENT_TABLE ) ) ; eventTable . put ( p ) ; } finally { if ( eventTable != null ) { eventTable . close ( ) ; } }
public class ConstructorWriterImpl { /** * { @ inheritDoc } */ @ Override public void setSummaryColumnStyle ( HtmlTree tdTree ) { } }
if ( foundNonPubConstructor ) tdTree . addStyle ( HtmlStyle . colLast ) ; else tdTree . addStyle ( HtmlStyle . colOne ) ;
public class JettyServletWebServerFactory { /** * Return the Jetty { @ link Configuration } s that should be applied to the server . * @ param webAppContext the Jetty { @ link WebAppContext } * @ param initializers the { @ link ServletContextInitializer } s to apply * @ return configurations to apply */ protected Configuration [ ] getWebAppContextConfigurations ( WebAppContext webAppContext , ServletContextInitializer ... initializers ) { } }
List < Configuration > configurations = new ArrayList < > ( ) ; configurations . add ( getServletContextInitializerConfiguration ( webAppContext , initializers ) ) ; configurations . addAll ( getConfigurations ( ) ) ; configurations . add ( getErrorPageConfiguration ( ) ) ; configurations . add ( getMimeTypeConfiguration ( ) ) ; return configurations . toArray ( new Configuration [ 0 ] ) ;
public class CollectionDescriptorConstraints {

    /**
     * Ensures that the given collection descriptor has the collection-class property if necessary.
     * Arrays must not carry a collection-class; for non-arrays under strict
     * checking, any specified collection-class must implement the manageable
     * collection interface and be assignable to the variable type, and when no
     * class is specified the variable type itself must qualify (or at least be
     * a java.util Collection).
     *
     * @param collDef The collection descriptor
     * @param checkLevel The current check level (this constraint is checked in basic (partly) and strict)
     * @exception ConstraintException If collection-class is given for an array or if no collection-class is given but required
     */
    private void ensureCollectionClass(CollectionDescriptorDef collDef, String checkLevel) throws ConstraintException {
        if (CHECKLEVEL_NONE.equals(checkLevel)) {
            return;
        }
        if (collDef.hasProperty(PropertyHelper.OJB_PROPERTY_ARRAY_ELEMENT_CLASS_REF)) {
            // an array cannot have a collection-class specified
            if (collDef.hasProperty(PropertyHelper.OJB_PROPERTY_COLLECTION_CLASS)) {
                throw new ConstraintException("Collection " + collDef.getName() + " in class " + collDef.getOwner().getName() + " is an array but does specify collection-class");
            } else {
                // no further processing necessary as its an array
                return;
            }
        }
        if (CHECKLEVEL_STRICT.equals(checkLevel)) {
            InheritanceHelper helper = new InheritanceHelper();
            ModelDef model = (ModelDef) collDef.getOwner().getOwner();
            String specifiedClass = collDef.getProperty(PropertyHelper.OJB_PROPERTY_COLLECTION_CLASS);
            String variableType = collDef.getProperty(PropertyHelper.OJB_PROPERTY_VARIABLE_TYPE);
            try {
                if (specifiedClass != null) {
                    // if we have a specified class then it has to implement the manageable collection and be a sub type of the variable type
                    if (!helper.isSameOrSubTypeOf(specifiedClass, variableType)) {
                        throw new ConstraintException("The type " + specifiedClass + " specified as collection-class of the collection " + collDef.getName() + " in class " + collDef.getOwner().getName() + " is not a sub type of the variable type " + variableType);
                    }
                    if (!helper.isSameOrSubTypeOf(specifiedClass, MANAGEABLE_COLLECTION_INTERFACE)) {
                        throw new ConstraintException("The type " + specifiedClass + " specified as collection-class of the collection " + collDef.getName() + " in class " + collDef.getOwner().getName() + " does not implement " + MANAGEABLE_COLLECTION_INTERFACE);
                    }
                } else {
                    // no collection class specified so the variable type has to be a collection type
                    if (helper.isSameOrSubTypeOf(variableType, MANAGEABLE_COLLECTION_INTERFACE)) {
                        // we can specify it as a collection-class as it is an manageable collection
                        collDef.setProperty(PropertyHelper.OJB_PROPERTY_COLLECTION_CLASS, variableType);
                    } else if (!helper.isSameOrSubTypeOf(variableType, JAVA_COLLECTION_INTERFACE)) {
                        throw new ConstraintException("The collection " + collDef.getName() + " in class " + collDef.getOwner().getName() + " needs the collection-class attribute as its variable type does not implement " + JAVA_COLLECTION_INTERFACE);
                    }
                }
            } catch (ClassNotFoundException ex) {
                // A type named in the descriptor could not be resolved on the classpath.
                throw new ConstraintException("Could not find the class " + ex.getMessage() + " on the classpath while checking the collection " + collDef.getName() + " in class " + collDef.getOwner().getName());
            }
        }
    }
}
public class BaseFont { /** * Gets the width of a < CODE > String < / CODE > in points taking kerning * into account . * @ param text the < CODE > String < / CODE > to get the width of * @ param fontSize the font size * @ return the width in points */ public float getWidthPointKerned ( String text , float fontSize ) { } }
float size = getWidth ( text ) * 0.001f * fontSize ; if ( ! hasKernPairs ( ) ) return size ; int len = text . length ( ) - 1 ; int kern = 0 ; char c [ ] = text . toCharArray ( ) ; for ( int k = 0 ; k < len ; ++ k ) { kern += getKerning ( c [ k ] , c [ k + 1 ] ) ; } return size + kern * 0.001f * fontSize ;
public class ReservoirItemsUnion {

    /**
     * This either merges sketchIn into gadget_ or gadget_ into sketchIn. If merging into sketchIn
     * with isModifiable set to false, copies elements from sketchIn first, leaving original
     * unchanged.
     * The lighter-weighted side is always merged into the heavier one; when the
     * merge direction is reversed, gadget_ and (a possibly copied) sketchIn are
     * swapped first.
     *
     * @param sketchIn Sketch with new samples from which to draw
     * @param isModifiable Flag indicating whether sketchIn can be modified (e.g. if it was rebuilt
     *                     from Memory)
     */
    private void twoWayMergeInternal(final ReservoirItemsSketch<T> sketchIn, final boolean isModifiable) {
        if (sketchIn.getN() <= sketchIn.getK()) {
            // sketchIn is still in exact mode: standard merge into gadget_.
            twoWayMergeInternalStandard(sketchIn);
        } else if (gadget_.getN() < gadget_.getK()) {
            // merge into sketchIn, so swap first
            final ReservoirItemsSketch<T> tmpSketch = gadget_;
            gadget_ = (isModifiable ? sketchIn : sketchIn.copy());
            twoWayMergeInternalStandard(tmpSketch);
        } else if (sketchIn.getImplicitSampleWeight() < (gadget_.getN() / ((double) (gadget_.getK() - 1)))) {
            // implicit weights in sketchIn are light enough to merge into gadget
            twoWayMergeInternalWeighted(sketchIn);
        } else {
            // Use next line as an assert/exception?
            // gadget_.getImplicitSampleWeight() < sketchIn.getN() / ((double) (sketchIn.getK() - 1))) {
            // implicit weights in gadget are light enough to merge into sketchIn
            // merge into sketchIn, so swap first
            final ReservoirItemsSketch<T> tmpSketch = gadget_;
            gadget_ = (isModifiable ? sketchIn : sketchIn.copy());
            twoWayMergeInternalWeighted(tmpSketch);
        }
    }
}
public class ObjectToJsonConverter {

    /**
     * Setup the context with the limits given in the request or with the default limits if not. In all cases,
     * hard limits as defined in the servlet configuration are never exceeded.
     * Stores a new serialization context in the thread-local slot for the
     * duration of the conversion.
     *
     * @param pOpts options used for parsing.
     */
    void setupContext(JsonConvertOptions pOpts) {
        ObjectSerializationContext stackContext = new ObjectSerializationContext(pOpts);
        stackContextLocal.set(stackContext);
    }
}
public class CatalogUtil {

    /**
     * Read a hashed password from password.
     * SHA* hash it once to match what we will get from the wire protocol
     * and then hex encode it.
     * NOTE: an unavailable digest algorithm is treated as fatal — the process
     * logs and exits rather than throwing.
     *
     * @param password the clear-text password to hash
     * @param scheme the authentication scheme selecting the digest algorithm
     * @return the hex-encoded digest of the UTF-8 password bytes
     */
    private static String extractPassword(String password, ClientAuthScheme scheme) {
        MessageDigest md = null;
        try {
            md = MessageDigest.getInstance(ClientAuthScheme.getDigestScheme(scheme));
        } catch (final NoSuchAlgorithmException e) {
            // Fatal configuration problem: the JVM lacks the required digest.
            hostLog.l7dlog(Level.FATAL, LogKeys.compiler_VoltCompiler_NoSuchAlgorithm.name(), e);
            System.exit(-1);
        }
        final byte passwordHash[] = md.digest(password.getBytes(Charsets.UTF_8));
        return Encoder.hexEncode(passwordHash);
    }
}
public class GregorianCalendar {

    /**
     * Set hour, minutes, seconds and milliseconds to zero.
     * The input date is not modified; a truncated copy is returned.
     *
     * @param in Date to truncate
     * @return a new Date at the start of the same day
     */
    @SuppressWarnings("deprecation")
    private Date setHourToZero(Date in) {
        final Date d = new Date(in.getTime());
        d.setHours(0);
        d.setMinutes(0);
        d.setSeconds(0);
        // a trick to set milliseconds to zero: truncate to whole seconds
        long t = d.getTime() / 1000;
        t = t * 1000;
        return new Date(t);
    }
}
public class RunnersApi { /** * Remove a runner . * < pre > < code > GitLab Endpoint : DELETE / runners / : id < / code > < / pre > * @ param runnerId The ID of a runner * @ throws GitLabApiException if any exception occurs */ public void removeRunner ( Integer runnerId ) throws GitLabApiException { } }
if ( runnerId == null ) { throw new RuntimeException ( "runnerId cannot be null" ) ; } delete ( Response . Status . NO_CONTENT , null , "runners" , runnerId ) ;
public class Keys { /** * Tests whether the * " Java Cryptography Extension ( JCE ) Unlimited Strength Jurisdiction Policy Files " is * correctly installed . * @ return If strong keys can be used , true , otherwise false . */ public static boolean canUseStrongKeys ( ) { } }
try { int maxKeyLen = Cipher . getMaxAllowedKeyLength ( Crypto . CIPHER_ALGORITHM ) ; return maxKeyLen > 128 ; } catch ( NoSuchAlgorithmException e ) { throw new IllegalStateException ( "Algorithm unavailable: " + Crypto . CIPHER_ALGORITHM , e ) ; }
public class JCusparse {

    /**
     * Description: Wrapper that un-sorts sparse matrix stored in CSR format
     * (without exposing the permutation).
     * Thin JNI wrapper: delegates to the native cuSPARSE routine and converts
     * its status code via checkResult.
     *
     * @return the cuSPARSE status code of the native call
     */
    public static int cusparseScsr2csru(
        cusparseHandle handle,
        int m,
        int n,
        int nnz,
        cusparseMatDescr descrA,
        Pointer csrVal,
        Pointer csrRowPtr,
        Pointer csrColInd,
        csru2csrInfo info,
        Pointer pBuffer) {
        return checkResult(cusparseScsr2csruNative(handle, m, n, nnz, descrA, csrVal, csrRowPtr, csrColInd, info, pBuffer));
    }
}
public class DatasourceJBossASClient { /** * Checks to see if there is already a datasource with the given name . * @ param datasourceName the name to check * @ return true if there is a datasource with the given name already in existence * @ throws Exception any error */ public boolean isDatasource ( String datasourceName ) throws Exception { } }
Address addr = Address . root ( ) . add ( SUBSYSTEM , SUBSYSTEM_DATASOURCES ) ; String haystack = DATA_SOURCE ; return null != findNodeInList ( addr , haystack , datasourceName ) ;