signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PropertiesLoaderBuilder { /** * Adds a new property . Giving both name and value . * This methods does not lookup in the Spring Context , it only adds property and value as given . * @ param name to be added in the properties * @ param value to be added in the properties * @ return PropertyLoaderBuilder to continue the builder chain */ public PropertiesLoaderBuilder addProperty ( String name , String value ) { } }
props . put ( name , value ) ; return this ;
public class ExtendedByteBuf { /** * Reads a variable size int if possible . If not present the reader index is reset to the last mark . * @ param bf * @ return */ public static Optional < Integer > readMaybeVInt ( ByteBuf bf ) { } }
if ( bf . readableBytes ( ) >= 1 ) { byte b = bf . readByte ( ) ; return read ( bf , b , 7 , b & 0x7F , 1 ) ; } else { bf . resetReaderIndex ( ) ; return Optional . empty ( ) ; }
public class TimeZoneRule { /** * Returns if this rule represents the same rule and offsets as another . * When two < code > TimeZoneRule < / code > objects differ only its names , this method returns * true . * @ param other The < code > TimeZoneRule < / code > object to be compared with . * @ return true if the other < code > TimeZoneRule < / code > is the same as this one . */ public boolean isEquivalentTo ( TimeZoneRule other ) { } }
if ( rawOffset == other . rawOffset && dstSavings == other . dstSavings ) { return true ; } return false ;
public class ImageCodeUtils { /** * 输出随机验证码图片流 , 并返回验证码值 * @ param w * @ param h * @ param os * @ param verifySize * @ return * @ throws IOException */ public static String outputVerifyImage ( int w , int h , OutputStream os , int verifySize ) throws IOException { } }
String verifyCode = generateVerifyCode ( verifySize ) ; outputImage ( w , h , os , verifyCode ) ; return verifyCode ;
public class EclipseNode {
    /**
     * Visits this node and all child nodes depth-first, calling the provided visitor's visit methods.
     * For most kinds the pattern is: visitX, recurse into children, endVisitX.
     */
    public void traverse(EclipseASTVisitor visitor) {
        // Visitors that defer until after the full (post-diet) parse are skipped on partial parses.
        if (visitor.isDeferUntilPostDiet() && !isCompleteParse()) return;
        switch (getKind()) {
        case COMPILATION_UNIT:
            visitor.visitCompilationUnit(this, (CompilationUnitDeclaration) get());
            ast.traverseChildren(visitor, this);
            visitor.endVisitCompilationUnit(this, (CompilationUnitDeclaration) get());
            break;
        case TYPE:
            visitor.visitType(this, (TypeDeclaration) get());
            ast.traverseChildren(visitor, this);
            visitor.endVisitType(this, (TypeDeclaration) get());
            break;
        case FIELD:
            visitor.visitField(this, (FieldDeclaration) get());
            ast.traverseChildren(visitor, this);
            visitor.endVisitField(this, (FieldDeclaration) get());
            break;
        case INITIALIZER:
            visitor.visitInitializer(this, (Initializer) get());
            ast.traverseChildren(visitor, this);
            visitor.endVisitInitializer(this, (Initializer) get());
            break;
        case METHOD:
            // Synthetic <clinit> methods are never visited.
            if (get() instanceof Clinit) return;
            visitor.visitMethod(this, (AbstractMethodDeclaration) get());
            ast.traverseChildren(visitor, this);
            visitor.endVisitMethod(this, (AbstractMethodDeclaration) get());
            break;
        case ARGUMENT:
            // Arguments are reported together with the method that declares them.
            AbstractMethodDeclaration method = (AbstractMethodDeclaration) up().get();
            visitor.visitMethodArgument(this, (Argument) get(), method);
            ast.traverseChildren(visitor, this);
            visitor.endVisitMethodArgument(this, (Argument) get(), method);
            break;
        case LOCAL:
            visitor.visitLocal(this, (LocalDeclaration) get());
            ast.traverseChildren(visitor, this);
            visitor.endVisitLocal(this, (LocalDeclaration) get());
            break;
        case ANNOTATION:
            // Annotation visits are dispatched on the kind of the annotated parent node;
            // there is no recursion and no matching endVisit call.
            switch (up().getKind()) {
            case TYPE:
                visitor.visitAnnotationOnType((TypeDeclaration) up().get(), this, (Annotation) get());
                break;
            case FIELD:
                visitor.visitAnnotationOnField((FieldDeclaration) up().get(), this, (Annotation) get());
                break;
            case METHOD:
                visitor.visitAnnotationOnMethod((AbstractMethodDeclaration) up().get(), this, (Annotation) get());
                break;
            case ARGUMENT:
                visitor.visitAnnotationOnMethodArgument((Argument) parent.get(), (AbstractMethodDeclaration) parent.directUp().get(), this, (Annotation) get());
                break;
            case LOCAL:
                visitor.visitAnnotationOnLocal((LocalDeclaration) parent.get(), this, (Annotation) get());
                break;
            case TYPE_USE:
                visitor.visitAnnotationOnTypeUse((TypeReference) parent.get(), this, (Annotation) get());
                break;
            default:
                throw new AssertionError("Annotation not expected as child of a " + up().getKind());
            }
            break;
        case TYPE_USE:
            visitor.visitTypeUse(this, (TypeReference) get());
            ast.traverseChildren(visitor, this);
            visitor.endVisitTypeUse(this, (TypeReference) get());
            break;
        case STATEMENT:
            visitor.visitStatement(this, (Statement) get());
            ast.traverseChildren(visitor, this);
            visitor.endVisitStatement(this, (Statement) get());
            break;
        default:
            throw new AssertionError("Unexpected kind during node traversal: " + getKind());
        }
    }
}
public class FastConcurrentDirectDeque {
    /**
     * Returns the first node, the unique node p for which:
     *     p.prev == null && p.next != p
     * The returned node may or may not be logically deleted.
     * Guarantees that head is set to the returned node.
     */
    Node<E> first() {
        restartFromHead:
        for (;;)
            // Lock-free traversal towards the front of the deque; retried from a
            // fresh head snapshot whenever the CAS on HEAD fails.
            for (Node<E> h = head, p = h, q;;) {
                if ((q = p.prev) != null && (q = (p = q).prev) != null)
                    // Check for head updates every other hop.
                    // If p == q, we are sure to follow head instead.
                    p = (h != (h = head)) ? h : q;
                else if (p == h
                        // It is possible that p is PREV_TERMINATOR,
                        // but if so, the CAS is guaranteed to fail.
                        || HEAD.compareAndSet(this, h, p))
                    return p;
                else
                    continue restartFromHead;
            }
    }
}
public class Hex { /** * Converts an array of bytes into an array of characters representing the hexidecimal values of each byte in order . * The returned array will be double the length of the passed array , as it takes two characters to represent any * given byte . * @ param data * a byte [ ] to convert to Hex characters * @ return A char [ ] containing hexidecimal characters */ public static char [ ] encodeHex ( byte [ ] data ) { } }
int l = data . length ; char [ ] out = new char [ l << 1 ] ; // two characters form the hex value . for ( int i = 0 , j = 0 ; i < l ; i ++ ) { out [ j ++ ] = DIGITS [ ( 0xF0 & data [ i ] ) >>> 4 ] ; out [ j ++ ] = DIGITS [ 0x0F & data [ i ] ] ; } return out ;
public class ModuleEnvironments { /** * Fetch an environment with a given { @ code environmentId } and space . * This method will override the configuration specified through * { @ link CMAClient . Builder # setSpaceId ( String ) } . * @ param spaceId space ID * @ param environmentId environment ID * @ return { @ link CMAEnvironment } result instance * @ throws IllegalArgumentException if space id is null . * @ throws IllegalArgumentException if environment ' s id is null . */ public CMAEnvironment fetchOne ( String spaceId , String environmentId ) { } }
assertNotNull ( spaceId , "spaceId" ) ; assertNotNull ( environmentId , "environmentId" ) ; return service . fetchOne ( spaceId , environmentId ) . blockingFirst ( ) ;
public class SecurityContextImpl {
    /**
     * Performs a login to recreate the full subject, given a WSPrincipal.
     *
     * @param wsPrincipal the deserialized WSPrincipal used to create the new subject; may be null
     * @param securityService the security service used to authenticate the user
     * @param unauthenticatedSubjectServiceRef reference to the unauthenticated subject service,
     *        used as the fallback when authentication fails or wsPrincipal is null
     * @param customCacheKey the custom cache key used to look up the subject
     * @return the authenticated subject, or the unauthenticated subject on error or null principal
     */
    @FFDCIgnore(AuthenticationException.class)
    protected Subject recreateFullSubject(WSPrincipal wsPrincipal, SecurityService securityService,
            AtomicServiceReference<UnauthenticatedSubjectService> unauthenticatedSubjectServiceRef, String customCacheKey) {
        Subject subject = null;
        if (wsPrincipal != null) {
            String userName = wsPrincipal.getName();
            AuthenticateUserHelper authHelper = new AuthenticateUserHelper();
            // Lazily default the JAAS login context entry used for deserialization logins.
            if (jaasLoginContextEntry == null) {
                jaasLoginContextEntry = DESERIALIZE_LOGINCONTEXT_DEFAULT;
            }
            try {
                subject = authHelper.authenticateUser(securityService.getAuthenticationService(), userName, jaasLoginContextEntry, customCacheKey);
            } catch (AuthenticationException e) {
                // Authentication failure is logged; the unauthenticated subject is used below.
                Tr.error(tc, "SEC_CONTEXT_DESERIALIZE_AUTHN_ERROR", new Object[] { e.getLocalizedMessage() });
            }
        }
        // Fall back to the unauthenticated subject when no subject could be recreated.
        if (subject == null) {
            subject = unauthenticatedSubjectServiceRef.getService().getUnauthenticatedSubject();
        }
        return subject;
    }
}
public class Utils { /** * Get the current time in microseconds since the epoch . This method is synchronized * and guarantees that each successive call , even by different threads , returns * increasing values . * @ return Current time in microseconds ( though not necessarily with microsecond * precision ) . */ public static long getTimeMicros ( ) { } }
// Use use a dedicated lock object rather than synchronizing on the method , which // would synchronize on the Utils . class object , which is too coarse - grained . synchronized ( g_lastMicroLock ) { // We use System . currentTimeMillis ( ) * 1000 for compatibility with the CLI and // other tools . This makes our timestamps " milliseconds since the epoch " . long newValue = System . currentTimeMillis ( ) * 1000 ; if ( newValue <= g_lastMicroValue ) { // Either two threads called us very quickly or the system clock was set // back a little . Just return the last value allocated + 1 . Eventually , // the system clock will catch up . newValue = g_lastMicroValue + 1 ; } g_lastMicroValue = newValue ; return newValue ; }
public class JsonPullParser { /** * Creates a new parser , using the given InputStream as its { @ code JSON } feed . * Please call one of the { @ code setSource ( . . . ) } ' s before calling other methods . * @ param is * An InputStream serves as { @ code JSON } feed . Cannot be null . * @ param charsetName * The character set should be assumed in which in the stream are encoded . { @ link # DEFAULT _ CHARSET _ NAME } is assumed if null is passed . * @ return { @ link JsonPullParser } * @ throws UnsupportedEncodingException * An unknown character set is specified . * @ throws IllegalArgumentException * { @ code null } has been passed in where not applicable . */ public static JsonPullParser newParser ( InputStream is , String charsetName ) throws UnsupportedEncodingException { } }
if ( is == null ) { throw new IllegalArgumentException ( "'is' must not be null." ) ; } try { final Charset charset = ( charsetName == null ) ? null : Charset . forName ( charsetName ) ; return newParser ( is , charset ) ; } catch ( UnsupportedCharsetException e ) { throw new UnsupportedEncodingException ( e . getCharsetName ( ) ) ; }
public class OpenVidu {
    /**
     * Stops the recording of a {@link io.openvidu.java.client.Session}.
     *
     * @param recordingId the id property of the recording to stop
     * @return the stopped recording
     * @throws OpenViduJavaClientException on transport-level failure
     * @throws OpenViduHttpException on a non-200 response; notably 404 (no such
     *         recording) and 406 (recording still in "starting" status)
     */
    public Recording stopRecording(String recordingId) throws OpenViduJavaClientException, OpenViduHttpException {
        HttpPost request = new HttpPost(OpenVidu.urlOpenViduServer + API_RECORDINGS + API_RECORDINGS_STOP + "/" + recordingId);
        HttpResponse response;
        try {
            response = OpenVidu.httpClient.execute(request);
        } catch (IOException e) {
            // Transport failure: surface it as the client exception type.
            throw new OpenViduJavaClientException(e.getMessage(), e.getCause());
        }
        try {
            int statusCode = response.getStatusLine().getStatusCode();
            if ((statusCode == org.apache.http.HttpStatus.SC_OK)) {
                Recording r = new Recording(httpResponseToJson(response));
                // Keep the local session cache in sync with the recording state.
                Session activeSession = OpenVidu.activeSessions.get(r.getSessionId());
                if (activeSession != null) {
                    activeSession.setIsBeingRecorded(false);
                } else {
                    log.warn("No active session found for sessionId '" + r.getSessionId()
                            + "'. This instance of OpenVidu Java Client didn't create this session");
                }
                return r;
            } else {
                throw new OpenViduHttpException(statusCode);
            }
        } finally {
            // Always release the response entity so the connection returns to the pool.
            EntityUtils.consumeQuietly(response.getEntity());
        }
    }
}
public class PredictionsImpl {
    /**
     * Predicts an image url without saving the result.
     *
     * @param projectId the project id
     * @param predictImageUrlWithNoStoreOptionalParameter optional parameters to set before calling this API
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ImagePrediction> predictImageUrlWithNoStoreAsync(UUID projectId, PredictImageUrlWithNoStoreOptionalParameter predictImageUrlWithNoStoreOptionalParameter, final ServiceCallback<ImagePrediction> serviceCallback) {
        // Adapt the observable service response into a ServiceFuture wired to the callback.
        return ServiceFuture.fromResponse(predictImageUrlWithNoStoreWithServiceResponseAsync(projectId, predictImageUrlWithNoStoreOptionalParameter), serviceCallback);
    }
}
public class LabsInner {
    /**
     * Registers to a managed lab.
     *
     * @param resourceGroupName the name of the resource group
     * @param labAccountName the name of the lab account
     * @param labName the name of the lab
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> registerAsync(String resourceGroupName, String labAccountName, String labName, final ServiceCallback<Void> serviceCallback) {
        // Adapt the observable service response into a ServiceFuture wired to the callback.
        return ServiceFuture.fromResponse(registerWithServiceResponseAsync(resourceGroupName, labAccountName, labName), serviceCallback);
    }
}
public class AWSSimpleSystemsManagementClient { /** * Returns a summary count of compliant and non - compliant resources for a compliance type . For example , this call * can return State Manager associations , patches , or custom compliance types according to the filter criteria that * you specify . * @ param listComplianceSummariesRequest * @ return Result of the ListComplianceSummaries operation returned by the service . * @ throws InvalidFilterException * The filter name is not valid . Verify the you entered the correct name and try again . * @ throws InvalidNextTokenException * The specified token is not valid . * @ throws InternalServerErrorException * An error occurred on the server side . * @ sample AWSSimpleSystemsManagement . ListComplianceSummaries * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ssm - 2014-11-06 / ListComplianceSummaries " target = " _ top " > AWS * API Documentation < / a > */ @ Override public ListComplianceSummariesResult listComplianceSummaries ( ListComplianceSummariesRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListComplianceSummaries ( request ) ;
public class NNF {
    /**
     * Conversion of non-class expressions to NNF: rebuilds the subclass axiom
     * with both sides rewritten by this visitor.
     */
    @Override
    public OWLAxiom visit(OWLSubClassOfAxiom axiom) {
        return dataFactory.getOWLSubClassOfAxiom(axiom.getSubClass().accept(this), axiom.getSuperClass().accept(this));
    }
}
public class StaticSentinel {
    /**
     * Registers the termination of a method: increments the "finished" counter
     * and decrements the "active" counter for the given slot, records a value
     * in the map under the class name, then notifies state listeners.
     *
     * @param classname key under which the counter value is recorded
     * @param fnr2 index of the counter slot to update
     */
    public synchronized static void leaving(String classname, int fnr2) {
        int fn = fnr2;
        // NOTE(review): with post-increment/decrement, the value stored in the map is
        // the counter's value BEFORE the update; the second put also overwrites the
        // first for the same classname key — confirm both are intended.
        m.put(classname, StaticOperationsCounters.fin[fn]++); // fin [ fn ] + + ; changes the # fin counter adding one to it.
        m.put(classname, StaticOperationsCounters.active[fn]--); // changes the # active counter removing one to it.
        stateChanged();
    }
}
public class RaftNodeImpl { /** * Schedules task using { @ link RaftIntegration # schedule ( Runnable , long , TimeUnit ) } . */ public void schedule ( Runnable task , long delayInMillis ) { } }
if ( isTerminatedOrSteppedDown ( ) ) { return ; } raftIntegration . schedule ( task , delayInMillis , MILLISECONDS ) ;
public class AllWindowedStream { /** * Applies the given window function to each window . The window function is called for each * evaluation of the window . The output of the window function is * interpreted as a regular non - windowed stream . * < p > Note that this function requires that all data in the windows is buffered until the window * is evaluated , as the function provides no means of incremental aggregation . * @ param function The process window function . * @ return The data stream that is the result of applying the window function to the window . */ @ PublicEvolving public < R > SingleOutputStreamOperator < R > process ( ProcessAllWindowFunction < T , R , W > function ) { } }
String callLocation = Utils . getCallLocationName ( ) ; function = input . getExecutionEnvironment ( ) . clean ( function ) ; TypeInformation < R > resultType = getProcessAllWindowFunctionReturnType ( function , getInputType ( ) ) ; return apply ( new InternalIterableProcessAllWindowFunction < > ( function ) , resultType , callLocation ) ;
public class Interest { /** * Checks whether given object is of interest to this interest instance . * @ param action the action to be performed on the object * @ param object the object to check * @ return true if the object is of interest to this , false otherwise */ public boolean matches ( Action < ? , ? > action , Object object ) { } }
return this . action == action && object != null && entityType . isAssignableFrom ( object . getClass ( ) ) ;
public class Trie { /** * Add a key with associated value to the trie . * @ param key the key . * @ param value the value . */ public void put ( K [ ] key , V value ) { } }
Node child = root . get ( key [ 0 ] ) ; if ( child == null ) { child = new Node ( key [ 0 ] ) ; root . put ( key [ 0 ] , child ) ; } child . addChild ( key , value , 1 ) ;
public class AbstractBeanDefinition { /** * Obtains a bean definition for the field at the given index and the argument at the given index * Warning : this method is used by internal generated code and should not be called by user code . * @ param resolutionContext The resolution context * @ param context The context * @ param injectionPoint The field injection point * @ return The resolved bean */ @ SuppressWarnings ( "WeakerAccess" ) @ Internal protected final Collection getBeansOfTypeForField ( BeanResolutionContext resolutionContext , BeanContext context , FieldInjectionPoint injectionPoint ) { } }
return resolveBeanWithGenericsForField ( resolutionContext , injectionPoint , ( beanType , qualifier ) -> { boolean hasNoGenerics = ! injectionPoint . getType ( ) . isArray ( ) && injectionPoint . asArgument ( ) . getTypeVariables ( ) . isEmpty ( ) ; if ( hasNoGenerics ) { return ( ( DefaultBeanContext ) context ) . getBean ( resolutionContext , beanType , qualifier ) ; } else { return ( ( DefaultBeanContext ) context ) . getBeansOfType ( resolutionContext , beanType , qualifier ) ; } } ) ;
public class CommonOps_DDF3 { /** * Returns the absolute value of the element in the matrix that has the largest absolute value . < br > * < br > * Max { | a < sub > ij < / sub > | } for all i and j < br > * @ param a A matrix . Not modified . * @ return The max abs element value of the matrix . */ public static double elementMaxAbs ( DMatrix3x3 a ) { } }
double max = Math . abs ( a . a11 ) ; double tmp = Math . abs ( a . a12 ) ; if ( tmp > max ) max = tmp ; tmp = Math . abs ( a . a13 ) ; if ( tmp > max ) max = tmp ; tmp = Math . abs ( a . a21 ) ; if ( tmp > max ) max = tmp ; tmp = Math . abs ( a . a22 ) ; if ( tmp > max ) max = tmp ; tmp = Math . abs ( a . a23 ) ; if ( tmp > max ) max = tmp ; tmp = Math . abs ( a . a31 ) ; if ( tmp > max ) max = tmp ; tmp = Math . abs ( a . a32 ) ; if ( tmp > max ) max = tmp ; tmp = Math . abs ( a . a33 ) ; if ( tmp > max ) max = tmp ; return max ;
public class Parser {
    /**
     * literal := &lt;TRUE&gt; | &lt;FALSE&gt; | &lt;STRING&gt; | &lt;INTEGER&gt; | &lt;FLOAT&gt; | &lt;NULL&gt;
     *
     * @return the parsed literal node, or null when the current token is not a
     *         literal (including an EXTENSION token whose handler is not a
     *         LITERAL extension point)
     */
    protected AstNode literal() throws ScanException, ParseException {
        AstNode v = null;
        switch (token.getSymbol()) {
        case TRUE:
            v = new AstBoolean(true);
            consumeToken();
            break;
        case FALSE:
            v = new AstBoolean(false);
            consumeToken();
            break;
        case STRING:
            v = new AstString(token.getImage());
            consumeToken();
            break;
        case INTEGER:
            v = new AstNumber(parseInteger(token.getImage()));
            consumeToken();
            break;
        case FLOAT:
            v = new AstNumber(parseFloat(token.getImage()));
            consumeToken();
            break;
        case NULL:
            v = new AstNull();
            consumeToken();
            break;
        case EXTENSION:
            // Only extensions registered as LITERAL extension points may
            // produce a node here; otherwise v stays null and is returned.
            if (getExtensionHandler(token).getExtensionPoint() == ExtensionPoint.LITERAL) {
                v = getExtensionHandler(consumeToken()).createAstNode();
                break;
            }
        }
        return v;
    }
}
public class Reporter { /** * Creates the directory and file to hold the test output file */ private void setupFile ( ) { } }
if ( ! new File ( directory ) . exists ( ) && ! new File ( directory ) . mkdirs ( ) ) { try { throw new IOException ( "Unable to create output directory" ) ; } catch ( IOException e ) { log . info ( e ) ; } } if ( ! file . exists ( ) ) { try { if ( ! file . createNewFile ( ) ) { throw new IOException ( "Unable to create output file" ) ; } } catch ( IOException e ) { log . error ( e ) ; } }
public class ExecuteMethodValidatorChecker {
    /**
     * Checks the given field for a mismatched validator annotation, delegating
     * to the internal implementation.
     *
     * @param field the field whose validator annotation is checked
     * @param genericMap resolved generic types for the check — presumably keyed
     *        by type-variable name; confirm against the delegate's contract
     */
    public void checkMismatchedValidatorAnnotation(Field field, Map<String, Class<?>> genericMap) {
        doCheckMismatchedValidatorAnnotation(field, genericMap);
    }
}
public class TableUtils {
    /**
     * Waits up to 10 minutes for the specified DynamoDB table to move into the
     * ACTIVE state. If the table does not exist or does not transition to
     * ACTIVE in time, an exception is thrown.
     *
     * @param dynamo the DynamoDB client to use to make requests
     * @param tableName the name of the table whose status is being checked
     * @throws TableNeverTransitionedToStateException if the table does not
     *         exist or does not become ACTIVE before the timeout
     * @throws InterruptedException if the thread is interrupted while polling
     */
    public static void waitUntilActive(final AmazonDynamoDB dynamo, final String tableName) throws InterruptedException, TableNeverTransitionedToStateException {
        // Delegate to the configurable overload with the default timeout and polling interval.
        waitUntilActive(dynamo, tableName, DEFAULT_WAIT_TIMEOUT, DEFAULT_WAIT_INTERVAL);
    }
}
public class AbstractBufferedAttributeProvider { /** * Replies the value associated to the specified name . */ private AttributeValue extractValueFor ( String name ) throws AttributeException { } }
final AttributeValue value ; if ( this . cache . containsKey ( name ) ) { value = this . cache . get ( name ) ; } else { value = loadValue ( name ) ; this . cache . put ( name , value ) ; } return value ;
public class PercentLayoutHelper {
    /**
     * Constructs a PercentLayoutInfo from attributes associated with a View.
     * Call this method from the {@code LayoutParams(Context c, AttributeSet attrs)} constructor.
     *
     * @param context the context used to resolve the styled attributes
     * @param attrs the attribute set of the View being inflated
     * @return the parsed layout info, or null when no percent attribute is present
     */
    public static PercentLayoutInfo getPercentLayoutInfo(Context context, AttributeSet attrs) {
        PercentLayoutInfo info = null;
        TypedArray array = context.obtainStyledAttributes(attrs, R.styleable.Carbon);
        // Each attribute is read as a fraction; -1f means "not set". The info
        // object is created lazily by the first attribute actually present, so
        // a View with no percent attributes yields null.
        float value = array.getFraction(R.styleable.Carbon_carbon_widthPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent width: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.widthPercent = value;
        }
        value = array.getFraction(R.styleable.Carbon_carbon_heightPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent height: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.heightPercent = value;
        }
        // carbon_marginPercent applies one value to all four margins; the
        // side-specific attributes below can then override individual sides.
        value = array.getFraction(R.styleable.Carbon_carbon_marginPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent margin: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.leftMarginPercent = value;
            info.topMarginPercent = value;
            info.rightMarginPercent = value;
            info.bottomMarginPercent = value;
        }
        value = array.getFraction(R.styleable.Carbon_carbon_marginLeftPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent left margin: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.leftMarginPercent = value;
        }
        value = array.getFraction(R.styleable.Carbon_carbon_marginTopPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent top margin: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.topMarginPercent = value;
        }
        value = array.getFraction(R.styleable.Carbon_carbon_marginRightPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent right margin: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.rightMarginPercent = value;
        }
        value = array.getFraction(R.styleable.Carbon_carbon_marginBottomPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent bottom margin: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.bottomMarginPercent = value;
        }
        value = array.getFraction(R.styleable.Carbon_carbon_marginStartPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent start margin: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.startMarginPercent = value;
        }
        value = array.getFraction(R.styleable.Carbon_carbon_marginEndPercent, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "percent end margin: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.endMarginPercent = value;
        }
        value = array.getFraction(R.styleable.Carbon_carbon_aspectRatio, 1, 1, -1f);
        if (value != -1f) {
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "aspect ratio: " + value);
            }
            info = info != null ? info : new PercentLayoutInfo();
            info.aspectRatio = value;
        }
        // TypedArrays are pooled; recycle is mandatory.
        array.recycle();
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "constructed: " + info);
        }
        return info;
    }
}
public class POIUtils {
    /**
     * Determines the spreadsheet version of the given sheet.
     *
     * @since 2.0
     * @param sheet the sheet to inspect
     * @return the spreadsheet version, or null if the sheet type is unknown
     * @throws IllegalArgumentException if {@literal sheet == null}
     */
    public static SpreadsheetVersion getVersion(final Sheet sheet) {
        ArgUtils.notNull(sheet, "sheet");
        // HSSF is the legacy .xls (Excel 97) format; XSSF is .xlsx (Excel 2007+).
        if (sheet instanceof HSSFSheet) {
            return SpreadsheetVersion.EXCEL97;
        } else if (sheet instanceof XSSFSheet) {
            return SpreadsheetVersion.EXCEL2007;
        }
        return null;
    }
}
public class DataConsumer { /** * Creates a future that will send a request to the reporting host and call * the handler with the response * @ param path the path at the reporting host which the request will be made * @ param reportingHandler the handler to receive the response once executed * and recieved * @ return a { @ link java . util . concurrent . Future } for handing the request */ public Future < String > sendRequest ( final String path , final ReportingHandler reportingHandler ) { } }
return threadPool . submit ( new Callable < String > ( ) { @ Override public String call ( ) { String response = getResponse ( path ) ; if ( reportingHandler != null ) { reportingHandler . handleResponse ( response ) ; } return response ; } } ) ;
public class ProxyTask {
    /**
     * Initializes this task via the superclass, resolves the owning application
     * if not already set, and registers this task with it.
     *
     * @param servlet the servlet this task belongs to
     * @param servletType the type of the servlet
     */
    public void init(BasicServlet servlet, BasicServlet.SERVLET_TYPE servletType) {
        super.init(servlet, servletType);
        // First see if this is an active session
        if (m_application == null) m_application = this.getNonUserApplication();
        // This task belongs to the servlet.
        m_application.addTask(this, null);
    }
}
public class JLanguageTool { /** * Activate rules that depend on a language model . The language model currently * consists of Lucene indexes with ngram occurrence counts . * @ param indexDir directory with a ' 3grams ' sub directory which contains a Lucene index with 3gram occurrence counts * @ since 2.7 */ public void activateLanguageModelRules ( File indexDir ) throws IOException { } }
LanguageModel languageModel = language . getLanguageModel ( indexDir ) ; if ( languageModel != null ) { ResourceBundle messages = getMessageBundle ( language ) ; List < Rule > rules = language . getRelevantLanguageModelRules ( messages , languageModel ) ; userRules . addAll ( rules ) ; updateOptionalLanguageModelRules ( languageModel ) ; }
public class WorkbenchEntrySet {
    /**
     * Rehashes the set to a new size.
     *
     * @param newN the new table size — presumably a power of two, since
     *        {@code newN - 1} is used as the hash mask; confirm with callers
     */
    protected void rehash(final int newN) {
        int i = 0, pos;
        final WorkbenchEntry[] workbenchEntry = this.workbenchEntry;
        final int newMask = newN - 1; // mask replaces modulo for power-of-two sizes
        final WorkbenchEntry[] newWorkbenchEntry = new WorkbenchEntry[newN];
        // Re-insert each of the `size` live entries from the old table.
        for (int j = size; j-- != 0;) {
            while (workbenchEntry[i] == null) i++; // skip empty slots
            WorkbenchEntry e = workbenchEntry[i];
            pos = hashCode(e.ipAddress) & newMask;
            // Linear probing: advance (with wraparound) until a free slot is found.
            while (newWorkbenchEntry[pos] != null) pos = (pos + 1) & newMask;
            newWorkbenchEntry[pos] = e;
            i++;
        }
        n = newN;
        mask = newMask;
        maxFill = 3 * (n / 4); // resize threshold at 3/4 load
        this.workbenchEntry = newWorkbenchEntry;
    }
}
public class JNotePartOfGroup {
    /**
     * Returns the bounding rectangle of this note, accounting for stem
     * direction: the box spans from the stem end to half a glyph beyond the
     * note head on the opposite side.
     */
    public Rectangle2D getBoundingBox() {
        Dimension glyphDimension = getMetrics().getGlyphDimension(getNotationContext());
        if (isStemUp()) {
            // Stem up: top edge is the stem end; height reaches half a glyph below the stem origin.
            return new Rectangle2D.Double((int) (getBase().getX()), stemYEnd, getWidth(),
                    getStemBeginPosition().getY() - stemYEnd + glyphDimension.getHeight() / 2);
        } else {
            // Stem down: top edge is half a glyph above the stem origin; height reaches the stem end.
            return new Rectangle2D.Double((int) (getBase().getX()),
                    getStemBeginPosition().getY() - glyphDimension.getHeight() / 2, getWidth(),
                    stemYEnd - getStemBeginPosition().getY() + 1 + glyphDimension.getHeight() / 2);
        }
    }
}
public class AccessibilityNodeInfoUtils { /** * Returns whether a node is long clickable . That is , the node supports at least one of the * following : * < ul > * < li > { @ link AccessibilityNodeInfoCompat # isLongClickable ( ) } < / li > * < li > { @ link AccessibilityNodeInfoCompat # ACTION _ LONG _ CLICK } < / li > * < / ul > * @ param node The node to examine . * @ return { @ code true } if node is long clickable . */ public static boolean isLongClickable ( AccessibilityNodeInfoCompat node ) { } }
if ( node == null ) { return false ; } if ( node . isLongClickable ( ) ) { return true ; } return supportsAnyAction ( node , AccessibilityNodeInfoCompat . ACTION_LONG_CLICK ) ;
public class GeoShapeCondition { /** * { @ inheritDoc } */ @ Override public Query query ( Schema schema ) { } }
if ( field == null || field . trim ( ) . isEmpty ( ) ) { throw new IllegalArgumentException ( "Field name required" ) ; } if ( shape == null ) { throw new IllegalArgumentException ( "Geo shape required" ) ; } if ( operator == null ) { throw new IllegalArgumentException ( "Geo operator required" ) ; } ColumnMapper columnMapper = schema . getMapper ( field ) ; if ( columnMapper == null || ! ( columnMapper instanceof GeoShapeMapper ) ) { throw new IllegalArgumentException ( "Not mapper found" ) ; } GeoShapeMapper mapper = ( GeoShapeMapper ) columnMapper ; SpatialContext spatialContext = mapper . getSpatialContext ( ) ; SpatialStrategy spatialStrategy = mapper . getStrategy ( field ) ; SpatialArgs args = new SpatialArgs ( operator . getSpatialOperation ( ) , shape . toSpatial4j ( spatialContext ) ) ; Query query = spatialStrategy . makeQuery ( args ) ; query . setBoost ( boost ) ; return query ;
public class SARLPreferences { /** * Replies the output path for the generated sources that is registered inside the project ' s preferences . * If the project has no specific configuration , replies < code > null < / code > . * @ param project the project . * @ return the output path for SARL compiler if the project has a specific configuration , * otherwise < code > null < / code > . */ public static IPath getSARLOutputPathFor ( IProject project ) { } }
// Only project-specific configurations are honored (IS_PROJECT_SPECIFIC flag); the first
// non-empty output directory among the project's Xtext output configurations wins.
// Returns null when the project uses the workspace-wide defaults or no path is set.
assert project != null ; final IPreferenceStore preferenceStore = getSARLPreferencesFor ( project ) ; if ( preferenceStore . getBoolean ( IS_PROJECT_SPECIFIC ) ) { String key ; for ( final OutputConfiguration projectConfiguration : getXtextConfigurationsFor ( project ) ) { key = BuilderPreferenceAccess . getKey ( projectConfiguration , EclipseOutputConfigurationProvider . OUTPUT_DIRECTORY ) ; final String path = preferenceStore . getString ( key ) ; if ( ! Strings . isNullOrEmpty ( path ) ) { return Path . fromOSString ( path ) ; } } } return null ;
public class DOMConfigurator { /** * Used internally to parse an { @ link ErrorHandler } element . */ protected void parseErrorHandler ( Element element , Appender appender ) { } }
// Instantiates the ErrorHandler named by the element's class attribute (after variable
// substitution) and wires it to the appender. Child elements are dispatched by tag:
// bean parameters (param), a backup appender reference, a logger reference by name
// (via the category factory when one is configured), or the root logger; anything else
// goes to quietParseUnrecognizedElement. A failed instantiation (eh == null) silently
// leaves the appender's error handler unchanged.
ErrorHandler eh = ( ErrorHandler ) OptionConverter . instantiateByClassName ( subst ( element . getAttribute ( CLASS_ATTR ) ) , org . apache . log4j . spi . ErrorHandler . class , null ) ; if ( eh != null ) { eh . setAppender ( appender ) ; PropertySetter propSetter = new PropertySetter ( eh ) ; NodeList children = element . getChildNodes ( ) ; final int length = children . getLength ( ) ; for ( int loop = 0 ; loop < length ; loop ++ ) { Node currentNode = children . item ( loop ) ; if ( currentNode . getNodeType ( ) == Node . ELEMENT_NODE ) { Element currentElement = ( Element ) currentNode ; String tagName = currentElement . getTagName ( ) ; if ( tagName . equals ( PARAM_TAG ) ) { setParameter ( currentElement , propSetter ) ; } else if ( tagName . equals ( APPENDER_REF_TAG ) ) { eh . setBackupAppender ( findAppenderByReference ( currentElement ) ) ; } else if ( tagName . equals ( LOGGER_REF ) ) { String loggerName = currentElement . getAttribute ( REF_ATTR ) ; Logger logger = ( catFactory == null ) ? repository . getLogger ( loggerName ) : repository . getLogger ( loggerName , catFactory ) ; eh . setLogger ( logger ) ; } else if ( tagName . equals ( ROOT_REF ) ) { Logger root = repository . getRootLogger ( ) ; eh . setLogger ( root ) ; } else { quietParseUnrecognizedElement ( eh , currentElement , props ) ; } } } propSetter . activate ( ) ; appender . setErrorHandler ( eh ) ; }
public class VarTensor { /** * Adds a factor to this one . * From libDAI : * The sum of two factors is defined as follows : if * \ f $ f : \ prod _ { l \ in L } X _ l \ to [ 0 , \ infty ) \ f $ and \ f $ g : \ prod _ { m \ in M } X _ m \ to [ 0 , \ infty ) \ f $ , then * \ f [ f + g : \ prod _ { l \ in L \ cup M } X _ l \ to [ 0 , \ infty ) : x \ mapsto f ( x _ L ) + g ( x _ M ) . \ f ] */ public void add ( VarTensor f ) { } }
// Builds the pointwise sum over the union of variable domains via the generic
// binary-op helper, then replaces this tensor's contents in place.
VarTensor newFactor = applyBinOp ( this , f , new AlgebraLambda . Add ( ) ) ; internalSet ( newFactor ) ;
public class FinishingOperationImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
// EMF-generated unsetter: resets the addressed structural feature to its literal
// default (EDEFAULT constant); unknown feature ids fall through to the superclass.
// Do not hand-edit — regenerated from the model (@generated).
switch ( featureID ) { case AfplibPackage . FINISHING_OPERATION__FOP_TYPE : setFOpType ( FOP_TYPE_EDEFAULT ) ; return ; case AfplibPackage . FINISHING_OPERATION__REF_EDGE : setRefEdge ( REF_EDGE_EDEFAULT ) ; return ; case AfplibPackage . FINISHING_OPERATION__FOP_CNT : setFOpCnt ( FOP_CNT_EDEFAULT ) ; return ; case AfplibPackage . FINISHING_OPERATION__AX_OFFST : setAxOffst ( AX_OFFST_EDEFAULT ) ; return ; case AfplibPackage . FINISHING_OPERATION__OP_POS : setOpPos ( OP_POS_EDEFAULT ) ; return ; } super . eUnset ( featureID ) ;
public class FullSupportScriptEngine { /** * - - - - - Compilable */ @ Override public CompiledScript compile ( String script ) throws ScriptException { } }
return ( ( Compilable ) engine ) . compile ( script ) ;
public class Resources { /** * Replaces all < code > r1 < / code > { @ link Resource } s with < code > r2 < / code > * { @ link Resource } s . * @ param r1 * @ param r2 */ public final void replace ( final List < Resource > r1 , final List < Resource > r2 ) { } }
// Removal happens before insertion, so elements present in both lists end up
// stored exactly once (as members of r2).
this . resources . removeAll ( r1 ) ; this . resources . addAll ( r2 ) ;
public class ListResolverRulesResult { /** * The resolver rules that were created using the current AWS account and that match the specified filters , if any . * @ param resolverRules * The resolver rules that were created using the current AWS account and that match the specified filters , * if any . */ public void setResolverRules ( java . util . Collection < ResolverRule > resolverRules ) { } }
if ( resolverRules == null ) { this . resolverRules = null ; return ; } this . resolverRules = new java . util . ArrayList < ResolverRule > ( resolverRules ) ;
public class ImageUtil { /** * Create a new image using the supplied shape as a mask from which to cut out pixels from the * supplied image . Pixels inside the shape will be added to the final image , pixels outside * the shape will be clear . */ public static BufferedImage composeMaskedImage ( ImageCreator isrc , Shape mask , BufferedImage base ) { } }
// Renders the mask shape in an arbitrary opaque color, then switches to the SrcIn alpha
// rule so that drawing the base image keeps only the pixels covered by the shape.
// The graphics context is always disposed, even if drawing throws.
int wid = base . getWidth ( ) ; int hei = base . getHeight ( ) ; // alternate method for composition : // 1 . create WriteableRaster with base data // 2 . test each pixel with mask . contains ( ) and set the alpha channel to fully - alpha if false // 3 . create buffered image from raster // ( I didn ' t use this method because it depends on the colormodel of the source image , and // was booching when the souce image was a cut - up from a tileset , and it seems like it // would take longer than the method we are using . But it ' s something to consider ) // composite them by rendering them with an alpha rule BufferedImage target = isrc . createImage ( wid , hei , Transparency . TRANSLUCENT ) ; Graphics2D g2 = target . createGraphics ( ) ; try { g2 . setColor ( Color . BLACK ) ; // whatever , really g2 . fill ( mask ) ; g2 . setComposite ( AlphaComposite . SrcIn ) ; g2 . drawImage ( base , 0 , 0 , null ) ; } finally { g2 . dispose ( ) ; } return target ;
public class SpringWebFluxLinkBuilder { /** * Process an already - built URL just before returning it . * This method can be overridden by any subclasses that want to change this behaviour . * @ param context the execution context . * @ param link the already - built URL . * @ return the processed URL , ready to be used . */ @ Override protected String processLink ( final IExpressionContext context , final String link ) { } }
if ( ! ( context instanceof ISpringWebFluxContext ) ) { return link ; } final ServerWebExchange exchange = ( ( ISpringWebFluxContext ) context ) . getExchange ( ) ; return exchange . transformUrl ( link ) ;
public class EObjects { /** * Checks that the contained object is in a different resource than it ' s owner , making * it a contained proxy . * @ param owner * @ param contained * @ return true if proxy */ public static boolean isContainmentProxy ( DatabindContext ctxt , EObject owner , EObject contained ) { } }
if ( contained . eIsProxy ( ) ) return true ; Resource ownerResource = EMFContext . getResource ( ctxt , owner ) ; Resource containedResource = EMFContext . getResource ( ctxt , contained ) ; return ownerResource != null && ownerResource != containedResource ;
public class BNFHeadersImpl { /** * Place the input int value into the outgoing cache . This will return * the buffer array as it may have changed if the cache need to be flushed . * @ param data * @ param buffers * @ return WsByteBuffer [ ] */ protected WsByteBuffer [ ] putInt ( int data , WsByteBuffer [ ] buffers ) { } }
return putBytes ( GenericUtils . asBytes ( data ) , buffers ) ;
public class JobXMLDescriptorImpl { /** * Adds a new namespace * @ return the current instance of < code > JobXMLDescriptor < / code > */ public JobXMLDescriptorImpl { /** * Adds a new namespace * @ return the current instance of < code > JobXMLDescriptor < / code > */ public JobXMLDescriptor addNamespace ( String name , String value ) { } }
// Stores the namespace as a name/value attribute on the descriptor's model node,
// then returns this for fluent builder chaining.
model . attribute ( name , value ) ; return this ;
public class CmsJobManagerApp { /** * Creates the edit view for the given job id . < p > * @ param jobId the id of the job to edit , or null to create a new job * @ param copy < code > true < / code > to create a copy of the given job * @ return the edit view */ public CmsJobEditView openEditDialog ( String jobId , boolean copy ) { } }
// Replaces any previously open dialog window. A blank/unknown jobId yields a fresh job
// with a new context info and the "new job" caption; otherwise the job is cloned (its
// active flag preserved). When copying, the clone's id is cleared and the copy caption
// used. The edit panel is filled from the bean, shown in a centered window, and returned.
if ( m_dialogWindow != null ) { m_dialogWindow . close ( ) ; } m_dialogWindow = CmsBasicDialog . prepareWindow ( DialogWidth . wide ) ; CmsScheduledJobInfo job = null ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( jobId ) ) { job = getElement ( jobId ) ; } CmsScheduledJobInfo jobCopy ; if ( job == null ) { jobCopy = new CmsScheduledJobInfo ( ) ; jobCopy . setContextInfo ( new CmsContextInfo ( ) ) ; m_dialogWindow . setCaption ( CmsVaadinUtils . getMessageText ( org . opencms . workplace . tools . scheduler . Messages . GUI_NEWJOB_ADMIN_TOOL_NAME_0 ) ) ; } else { jobCopy = job . clone ( ) ; jobCopy . setActive ( job . isActive ( ) ) ; if ( copy ) { jobCopy . clearId ( ) ; m_dialogWindow . setCaption ( CmsVaadinUtils . getMessageText ( org . opencms . ui . Messages . GUI_SCHEDULER_TITLE_COPY_1 , job . getJobName ( ) ) ) ; } else { m_dialogWindow . setCaption ( CmsVaadinUtils . getMessageText ( org . opencms . workplace . tools . scheduler . Messages . GUI_JOBS_LIST_ACTION_EDIT_NAME_0 ) ) ; } } CmsJobEditView editPanel = new CmsJobEditView ( this , jobCopy ) ; editPanel . loadFromBean ( jobCopy ) ; m_dialogWindow . setContent ( editPanel ) ; A_CmsUI . get ( ) . addWindow ( m_dialogWindow ) ; m_dialogWindow . center ( ) ; return editPanel ;
public class DeepLearning { /** * Sanity check for Deep Learning job parameters */ private void checkParams ( ) { } }
// Validates and auto-fills Deep Learning parameters: feature/hidden-layer sanity checks,
// dropout defaults, loss-function selection, autoencoder constraints, job/destination key
// bootstrap, and reproducibility overrides. Invalid combinations throw
// IllegalArgumentException / UnsupportedOperationException; settings that are merely
// ignored are logged unless quiet_mode is set. Mutates many job fields in place.
if ( source . numCols ( ) <= 1 ) throw new IllegalArgumentException ( "Training data must have at least 2 features (incl. response)." ) ; if ( hidden == null ) throw new IllegalArgumentException ( "There must be at least one hidden layer." ) ; for ( int i = 0 ; i < hidden . length ; ++ i ) { if ( hidden [ i ] == 0 ) throw new IllegalArgumentException ( "Hidden layer size must be >0." ) ; } // Auto - fill defaults if ( hidden_dropout_ratios == null ) { if ( activation == Activation . TanhWithDropout || activation == Activation . MaxoutWithDropout || activation == Activation . RectifierWithDropout ) { hidden_dropout_ratios = new double [ hidden . length ] ; if ( ! quiet_mode ) Log . info ( "Automatically setting all hidden dropout ratios to 0.5." ) ; Arrays . fill ( hidden_dropout_ratios , 0.5 ) ; } } else if ( hidden_dropout_ratios . length != hidden . length ) throw new IllegalArgumentException ( "Must have " + hidden . length + " hidden layer dropout ratios." ) ; else if ( activation != Activation . TanhWithDropout && activation != Activation . MaxoutWithDropout && activation != Activation . RectifierWithDropout ) { if ( ! quiet_mode ) Log . info ( "Ignoring hidden_dropout_ratios because a non-Dropout activation function was specified." ) ; } if ( input_dropout_ratio < 0 || input_dropout_ratio >= 1 ) { throw new IllegalArgumentException ( "Input dropout must be in [0,1)." ) ; } if ( class_sampling_factors != null && ! balance_classes ) { if ( ! quiet_mode ) Log . info ( "Ignoring class_sampling_factors since balance_classes is not enabled." ) ; } if ( ! quiet_mode ) { if ( adaptive_rate ) { Log . info ( "Using automatic learning rate. Ignoring the following input parameters:" ) ; Log . info ( " rate, rate_decay, rate_annealing, momentum_start, momentum_ramp, momentum_stable, nesterov_accelerated_gradient." ) ; } else { Log . info ( "Using manual learning rate. Ignoring the following input parameters:" ) ; Log . info ( " rho, epsilon." 
) ; } if ( initial_weight_distribution == InitialWeightDistribution . UniformAdaptive ) { Log . info ( "Ignoring initial_weight_scale for UniformAdaptive weight distribution." ) ; } if ( n_folds != 0 ) { if ( override_with_best_model ) { Log . info ( "Automatically setting override_with_best_model to false, since the final model is the only scored model with n-fold cross-validation." ) ; override_with_best_model = false ; } } } if ( loss == Loss . Automatic ) { if ( ! classification ) { if ( ! quiet_mode ) Log . info ( "Automatically setting loss to MeanSquare for regression." ) ; loss = Loss . MeanSquare ; } else if ( autoencoder ) { if ( ! quiet_mode ) Log . info ( "Automatically setting loss to MeanSquare for auto-encoder." ) ; loss = Loss . MeanSquare ; } else { if ( ! quiet_mode ) Log . info ( "Automatically setting loss to Cross-Entropy for classification." ) ; loss = Loss . CrossEntropy ; } } if ( autoencoder && sparsity_beta > 0 ) { if ( activation == Activation . Tanh || activation == Activation . TanhWithDropout ) { if ( average_activation >= 1 || average_activation <= - 1 ) throw new IllegalArgumentException ( "Tanh average activation must be in (-1,1)." ) ; } else if ( activation == Activation . Rectifier || activation == Activation . RectifierWithDropout ) { if ( average_activation <= 0 ) throw new IllegalArgumentException ( "Rectifier average activation must be positive." ) ; } } if ( ! classification && loss == Loss . CrossEntropy ) throw new IllegalArgumentException ( "Cannot use CrossEntropy loss function for regression." ) ; if ( autoencoder && loss != Loss . MeanSquare ) throw new IllegalArgumentException ( "Must use MeanSquare loss function for auto-encoder." ) ; if ( autoencoder && classification ) { classification = false ; Log . info ( "Using regression mode for auto-encoder." 
) ; } // reason for the error message below is that validation might not have the same horizontalized features as the training data ( or different order ) if ( autoencoder && validation != null ) throw new UnsupportedOperationException ( "Cannot specify a validation dataset for auto-encoder." ) ; if ( autoencoder && activation == Activation . Maxout ) throw new UnsupportedOperationException ( "Maxout activation is not supported for auto-encoder." ) ; if ( max_categorical_features < 1 ) throw new IllegalArgumentException ( "max_categorical_features must be at least " + 1 ) ; // make default job _ key and destination _ key in case they are missing if ( dest ( ) == null ) { destination_key = Key . make ( ) ; } if ( self ( ) == null ) { job_key = Key . make ( ) ; } if ( UKV . get ( self ( ) ) == null ) { start_time = System . currentTimeMillis ( ) ; state = JobState . RUNNING ; UKV . put ( self ( ) , this ) ; _fakejob = true ; } if ( ! sparse && col_major ) { if ( ! quiet_mode ) throw new IllegalArgumentException ( "Cannot use column major storage for non-sparse data handling." ) ; } if ( reproducible ) { if ( ! quiet_mode ) Log . info ( "Automatically enabling force_load_balancing, disabling single_node_mode and replicate_training_data\nand setting train_samples_per_iteration to -1 to enforce reproducibility." ) ; force_load_balance = true ; single_node_mode = false ; train_samples_per_iteration = - 1 ; replicate_training_data = false ; // there ' s no benefit from having multiple nodes compute the exact same thing , and then average it back to the same // replicate _ training _ data = true ; / / doesn ' t hurt , but does replicated identical work }
public class DateUtils { /** * Create a new Time , with no date component . */ public static java . sql . Time newTime ( ) { } }
return new java . sql . Time ( System . currentTimeMillis ( ) % DAY_MILLIS ) ;
public class AbstractBiclustering { /** * Returns the value of the data matrix at row < code > row < / code > and column * < code > col < / code > . * @ param row the row in the data matrix according to the current order of * rows ( refers to database entry * < code > database . get ( rowIDs [ row ] ) < / code > ) * @ param col the column in the data matrix according to the current order of * rows ( refers to the attribute value of an database entry * < code > getValue ( colIDs [ col ] ) < / code > ) * @ return the attribute value of the database entry as retrieved by * < code > database . get ( rowIDs [ row ] ) . getValue ( colIDs [ col ] ) < / code > */ protected double valueAt ( int row , int col ) { } }
// Positions the shared iterator at the requested (reordered) row, then reads the
// attribute value for the requested column from the relation.
// NOTE(review): `iter` is shared mutable state, so this accessor is not thread-safe.
iter . seek ( row ) ; return relation . get ( iter ) . doubleValue ( col ) ;
public class A_CmsTreeTabDataPreloader { /** * Loads the children of the already loaded resources . < p > * @ throws CmsException if something goes wrong */ private void loadChildren ( ) throws CmsException { } }
for ( CmsResource resource : new ArrayList < CmsResource > ( m_knownResources ) ) { if ( resource . isFolder ( ) ) { if ( ! m_mustLoadChildren . contains ( resource ) ) { continue ; } List < CmsResource > children = getChildren ( resource ) ; for ( CmsResource child : children ) { m_knownResources . add ( child ) ; m_childMap . put ( resource , child ) ; } } }
public class AbstractLockTableHandler { /** * { @ inheritDoc } */ public void cleanLocks ( ) throws SQLException { } }
ResultSet resultSet = null ; PreparedStatement preparedStatement = null ; Connection jdbcConnection = openConnection ( ) ; try { InspectionQuery query = getDeleteAllQuery ( ) ; preparedStatement = jdbcConnection . prepareStatement ( query . getStatement ( ) ) ; preparedStatement . executeUpdate ( ) ; } finally { JDBCUtils . freeResources ( resultSet , preparedStatement , jdbcConnection ) ; }
public class DomainsInner { /** * List keys for a domain . * List the two keys used to publish to a domain . * @ param resourceGroupName The name of the resource group within the user ' s subscription . * @ param domainName Name of the domain * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the DomainSharedAccessKeysInner object if successful . */ public DomainSharedAccessKeysInner listSharedAccessKeys ( String resourceGroupName , String domainName ) { } }
// Synchronous facade: blocks on the async service call and unwraps the response body.
return listSharedAccessKeysWithServiceResponseAsync ( resourceGroupName , domainName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class CloudFoundrySecurityService { /** * Return the URL of the UAA . * @ return the UAA url */ public String getUaaUrl ( ) { } }
if ( this . uaaUrl == null ) { try { Map < ? , ? > response = this . restTemplate . getForObject ( this . cloudControllerUrl + "/info" , Map . class ) ; this . uaaUrl = ( String ) response . get ( "token_endpoint" ) ; } catch ( HttpStatusCodeException ex ) { throw new CloudFoundryAuthorizationException ( Reason . SERVICE_UNAVAILABLE , "Unable to fetch token keys from UAA" ) ; } } return this . uaaUrl ;
public class CompilerConfigModule { /** * $ NON - NLS - 1 $ */ @ Override protected void configure ( ) { } }
// Guice module wiring: declares the compiler's configuration variables and their matching
// CLI options — file encoding, Java source version, Java compiler choice (enumerating all
// JavaCompiler JSON names as the option's value set), and a series of boolean generation
// switches (traces, storages, inlines, pures, equality tests, toString, clone, serial ids),
// each with a documented default value.
VariableDecls . extend ( binder ( ) ) . declareVar ( FILE_ENCODING_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( ENCODING_OPTION , MessageFormat . format ( Messages . CompilerConfigModule_0 , ENCODING_OPTION ) ) . configPath ( FILE_ENCODING_NAME ) . valueOptional ( Messages . CompilerConfigModule_1 ) . defaultValue ( Charset . defaultCharset ( ) . displayName ( ) ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( JAVA_VERSION_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "javasource" , // $ NON - NLS - 1 $ Messages . CompilerConfigModule_2 ) . configPath ( JAVA_VERSION_NAME ) . valueOptional ( Messages . CompilerConfigModule_3 ) . defaultValue ( SARLVersion . MINIMAL_JDK_VERSION ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( JAVA_COMPILER_NAME ) ; String jcompilerValues = null ; for ( final JavaCompiler jc : JavaCompiler . values ( ) ) { if ( jcompilerValues == null ) { jcompilerValues = jc . toJsonString ( ) ; } else { jcompilerValues = MessageFormat . format ( Messages . CompilerConfigModule_5 , jcompilerValues , jc . toJsonString ( ) ) ; } } extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "javacompiler" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_4 , JavaCompiler . getDefault ( ) . toJsonString ( ) ) ) . configPath ( JAVA_COMPILER_NAME ) . valueOptional ( jcompilerValues ) . defaultValue ( JavaCompiler . getDefault ( ) . toJsonString ( ) ) . build ( ) ) ; final String trueFalseValues = MessageFormat . format ( Messages . CompilerConfigModule_5 , Boolean . TRUE . toString ( ) , Boolean . FALSE . toString ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( OUTPUT_TRACES_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "writetraces" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_6 , Boolean . TRUE ) ) . configPath ( OUTPUT_TRACES_NAME ) . 
valueOptional ( trueFalseValues ) . defaultValue ( Boolean . TRUE . toString ( ) ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( OUTPUT_STORAGES_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "writestorages" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_7 , Boolean . TRUE ) ) . configPath ( OUTPUT_STORAGES_NAME ) . valueOptional ( trueFalseValues ) . defaultValue ( Boolean . TRUE . toString ( ) ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( GENERATE_INLINES_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "generateinlines" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_8 , GeneratorConfig2 . DEFAULT_GENERATE_INLINE_ANNOTATION ) ) . configPath ( GENERATE_INLINES_NAME ) . valueOptional ( trueFalseValues ) . defaultValue ( Boolean . toString ( GeneratorConfig2 . DEFAULT_GENERATE_INLINE_ANNOTATION ) ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( COMPRESS_INLINE_EXPRESSIONS_NAME ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( GENERATE_PURES_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "generatepures" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_9 , GeneratorConfig2 . DEFAULT_GENERATE_PURE_ANNOTATION ) ) . configPath ( GENERATE_PURES_NAME ) . valueOptional ( trueFalseValues ) . defaultValue ( Boolean . toString ( GeneratorConfig2 . DEFAULT_GENERATE_PURE_ANNOTATION ) ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( GENERATE_EQUALITY_TESTS_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "generateequalitytests" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_10 , GeneratorConfig2 . DEFAULT_GENERATE_EQUALITY_TEST_FUNCTIONS ) ) . configPath ( GENERATE_EQUALITY_TESTS_NAME ) . valueOptional ( trueFalseValues ) . defaultValue ( Boolean . 
toString ( GeneratorConfig2 . DEFAULT_GENERATE_EQUALITY_TEST_FUNCTIONS ) ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( GENERATE_TOSTRING_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "generatetostring" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_11 , GeneratorConfig2 . DEFAULT_GENERATE_TOSTRING_FUNCTION ) ) . configPath ( GENERATE_TOSTRING_NAME ) . valueOptional ( trueFalseValues ) . defaultValue ( Boolean . toString ( GeneratorConfig2 . DEFAULT_GENERATE_TOSTRING_FUNCTION ) ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( GENERATE_CLONE_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "generateclones" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_12 , GeneratorConfig2 . DEFAULT_GENERATE_CLONE_FUNCTION ) ) . configPath ( GENERATE_CLONE_NAME ) . valueOptional ( trueFalseValues ) . defaultValue ( Boolean . toString ( GeneratorConfig2 . DEFAULT_GENERATE_CLONE_FUNCTION ) ) . build ( ) ) ; VariableDecls . extend ( binder ( ) ) . declareVar ( GENERATE_SERIAL_IDS_NAME ) ; extend ( binder ( ) ) . addOption ( OptionMetadata . builder ( "generateserials" , // $ NON - NLS - 1 $ MessageFormat . format ( Messages . CompilerConfigModule_13 , GeneratorConfig2 . DEFAULT_GENERATE_SERIAL_NUMBER_FIELD ) ) . configPath ( GENERATE_SERIAL_IDS_NAME ) . valueOptional ( trueFalseValues ) . defaultValue ( Boolean . toString ( GeneratorConfig2 . DEFAULT_GENERATE_SERIAL_NUMBER_FIELD ) ) . build ( ) ) ;
public class Limiter { /** * Merge other { @ code Limiter } object into this ( other object becomes unusable after that ) . * @ param ls other object to merge * @ return this object */ public Limiter < T > putAll ( Limiter < T > ls ) { } }
// Merge strategy: when the other limiter is past its initial phase (!ls.initial), its
// first `limit` elements form a sorted prefix — insert them in order and stop at the
// first rejection, since later prefix elements cannot qualify either. The unsorted tail
// [limit, ls.size) is then inserted unconditionally through put(). Reads ls.data
// directly, which is why the other object must not be reused afterwards.
int i = 0 ; if ( ! ls . initial ) { // sorted part for ( ; i < limit ; i ++ ) { if ( ! put ( ls . data [ i ] ) ) break ; } i = limit ; } for ( ; i < ls . size ; i ++ ) { put ( ls . data [ i ] ) ; } return this ;
public class DataTracker { /** * Switch the current memtable . This atomically appends a new memtable to the end of the list of active memtables , * returning the previously last memtable . It leaves the previous Memtable in the list of live memtables until * discarding ( memtable ) is called . These two methods must be synchronized / paired , i . e . m = switchMemtable * must be followed by discarding ( m ) , they cannot be interleaved . * @ return the previously active memtable */ public Memtable switchMemtable ( boolean truncating ) { } }
// Lock-free switch: retry the compare-and-set until the view is atomically replaced with
// one holding the new memtable, then hand back the previously current memtable for
// flushing. On truncation, listeners are notified of the renewed (new) memtable.
Memtable newMemtable = new Memtable ( cfstore ) ; Memtable toFlushMemtable ; View currentView , newView ; do { currentView = view . get ( ) ; toFlushMemtable = currentView . getCurrentMemtable ( ) ; newView = currentView . switchMemtable ( newMemtable ) ; } while ( ! view . compareAndSet ( currentView , newView ) ) ; if ( truncating ) notifyRenewed ( newMemtable ) ; return toFlushMemtable ;
public class HttpRequestClient { /** * Parse the response and return the string from httpresponse body * @ param response * @ return String * @ throws IOException */ public static String getResult ( HttpResponse response ) throws IOException { } }
StringBuffer result = new StringBuffer ( ) ; if ( response . getEntity ( ) != null && response . getEntity ( ) . getContent ( ) != null ) { BufferedReader rd = new BufferedReader ( new InputStreamReader ( response . getEntity ( ) . getContent ( ) ) ) ; String line = "" ; while ( ( line = rd . readLine ( ) ) != null ) { result . append ( line ) ; } } logger . info ( result . toString ( ) ) ; return result . toString ( ) ;
public class ClusterManagerAvailabilityChecker { /** * Used for getting a client to the CoronaProxyJobTracker * @ param conf * @ return Returns a client to the CPJT * @ throws IOException */ public static CoronaProxyJobTrackerService . Client getPJTClient ( CoronaConf conf ) throws IOException { } }
// Builds a framed-transport, binary-protocol thrift client for the proxy job tracker
// address taken from the configuration. A failure to open the transport is only logged:
// the client is returned regardless, so callers must cope with an unopened transport.
InetSocketAddress address = NetUtils . createSocketAddr ( conf . getProxyJobTrackerThriftAddress ( ) ) ; TFramedTransport transport = new TFramedTransport ( new TSocket ( address . getHostName ( ) , address . getPort ( ) ) ) ; CoronaProxyJobTrackerService . Client client = new CoronaProxyJobTrackerService . Client ( new TBinaryProtocol ( transport ) ) ; try { transport . open ( ) ; } catch ( TException e ) { LOG . info ( "Transport Exception: " , e ) ; } return client ;
public class SipURIImpl { /** * ( non - Javadoc ) * @ see javax . servlet . sip . SipURI # setTTLParam ( int ) */ public void setTTLParam ( int ttl ) { } }
// Delegates to the wrapped JAIN-SIP URI and mirrors the value in the local parameter
// map. An invalid ttl is logged but not propagated, so the map is only updated when
// the underlying setter succeeds.
try { getSipURI ( ) . setTTLParam ( ttl ) ; super . parameters . put ( TTL , "" + ttl ) ; } catch ( InvalidArgumentException e ) { logger . error ( "invalid argument" , e ) ; }
public class TermOfUsePanel { /** * Factory method for creating the new { @ link Component } for the salvatorius clause . This method * is invoked in the constructor from the derived classes and can be overridden so users can * provide their own version of a new { @ link Component } for the salvatorius clause . * @ param id * the id * @ param model * the model * @ return the new { @ link Component } for the salvatorius clause */ protected Component newSalvatoriusClausePanel ( final String id , final IModel < HeaderContentListModelBean > model ) { } }
return new SalvatoriusClausePanel ( id , Model . of ( model . getObject ( ) ) ) ;
public class HadoopInputFormatBase { private void writeObject ( ObjectOutputStream out ) throws IOException { } }
// Custom serialization: superclass state first, then the class names of the input
// format and the key/value types, then the Hadoop configuration via its own Writable
// serialization. Presumably paired with a readObject that reads in the same order —
// keep both in sync.
super . write ( out ) ; out . writeUTF ( this . mapreduceInputFormat . getClass ( ) . getName ( ) ) ; out . writeUTF ( this . keyClass . getName ( ) ) ; out . writeUTF ( this . valueClass . getName ( ) ) ; this . configuration . write ( out ) ;
public class KxPublisherActor { /** * private . remoted cancel */ public void _cancel ( int id ) { } }
// If subscription callbacks are still being deferred (doOnSubscribe non-null), queue
// this cancel to run once subscription completes; otherwise drop the subscriber now.
if ( doOnSubscribe != null ) { doOnSubscribe . add ( ( ) -> _cancel ( id ) ) ; } else subscribers . remove ( id ) ;
public class BaasDocument { /** * Asynchronously saves this document on the server ignoring its version . * @ param acl { @ link com . baasbox . android . BaasACL } the initial acl settings * @ param handler a callback to be invoked with the result of the request * @ return a { @ link com . baasbox . android . RequestToken } to handle the request . */ public RequestToken save ( BaasACL acl , BaasHandler < BaasDocument > handler ) { } }
// Convenience overload: delegates to the full save variant with the version check
// explicitly disabled (IGNORE_VERSION).
return save ( SaveMode . IGNORE_VERSION , acl , handler ) ;
public class ExcelReader { /** * 读取Excel为Map的列表 < br > * Map表示一行 , 标题为key , 单元格内容为value * @ param headerRowIndex 标题所在行 , 如果标题行在读取的内容行中间 , 这行做为数据将忽略 * @ param startRowIndex 起始行 ( 包含 , 从0开始计数 ) * @ param endRowIndex 读取结束行 ( 包含 , 从0开始计数 ) * @ return Map的列表 */ public List < Map < String , Object > > read ( int headerRowIndex , int startRowIndex , int endRowIndex ) { } }
checkNotClosed ( ) ; // 边界判断 final int firstRowNum = sheet . getFirstRowNum ( ) ; final int lastRowNum = sheet . getLastRowNum ( ) ; if ( headerRowIndex < firstRowNum ) { throw new IndexOutOfBoundsException ( StrUtil . format ( "Header row index {} is lower than first row index {}." , headerRowIndex , firstRowNum ) ) ; } else if ( headerRowIndex > lastRowNum ) { throw new IndexOutOfBoundsException ( StrUtil . format ( "Header row index {} is greater than last row index {}." , headerRowIndex , firstRowNum ) ) ; } startRowIndex = Math . max ( startRowIndex , firstRowNum ) ; // 读取起始行 ( 包含 ) endRowIndex = Math . min ( endRowIndex , lastRowNum ) ; // 读取结束行 ( 包含 ) // 读取header List < Object > headerList = readRow ( sheet . getRow ( headerRowIndex ) ) ; final List < Map < String , Object > > result = new ArrayList < > ( endRowIndex - startRowIndex + 1 ) ; List < Object > rowList ; for ( int i = startRowIndex ; i <= endRowIndex ; i ++ ) { if ( i != headerRowIndex ) { // 跳过标题行 rowList = readRow ( sheet . getRow ( i ) ) ; if ( CollUtil . isNotEmpty ( rowList ) || false == ignoreEmptyRow ) { if ( null == rowList ) { rowList = new ArrayList < > ( 0 ) ; } result . add ( IterUtil . toMap ( aliasHeader ( headerList ) , rowList , true ) ) ; } } } return result ;
public class MediaModuleGenerator {
    /**
     * Generation of community tag.
     *
     * Builds a {@code <community>} element from the metadata's community data
     * (star rating, statistics, and a comma-separated tag list) and attaches it
     * to {@code e} only when at least one child element was produced.
     *
     * @param m source
     * @param e element to attach new element to
     */
    private void generateCommunity(final Metadata m, final Element e) {
        // Nothing to emit when no community data is present.
        if (m.getCommunity() == null) {
            return;
        }
        final Element communityElement = new Element("community", NS);
        if (m.getCommunity().getStarRating() != null) {
            final Element starRatingElement = new Element("starRating", NS);
            addNotNullAttribute(starRatingElement, "average", m.getCommunity().getStarRating().getAverage());
            addNotNullAttribute(starRatingElement, "count", m.getCommunity().getStarRating().getCount());
            addNotNullAttribute(starRatingElement, "min", m.getCommunity().getStarRating().getMin());
            addNotNullAttribute(starRatingElement, "max", m.getCommunity().getStarRating().getMax());
            // Only attach when at least one attribute was actually set.
            if (starRatingElement.hasAttributes()) {
                communityElement.addContent(starRatingElement);
            }
        }
        if (m.getCommunity().getStatistics() != null) {
            final Element statisticsElement = new Element("statistics", NS);
            addNotNullAttribute(statisticsElement, "views", m.getCommunity().getStatistics().getViews());
            addNotNullAttribute(statisticsElement, "favorites", m.getCommunity().getStatistics().getFavorites());
            if (statisticsElement.hasAttributes()) {
                communityElement.addContent(statisticsElement);
            }
        }
        if (m.getCommunity().getTags() != null && !m.getCommunity().getTags().isEmpty()) {
            final Element tagsElement = new Element("tags", NS);
            for (final Tag tag : m.getCommunity().getTags()) {
                // Comma-separate entries: the element already has text once the
                // first tag has been appended.
                if (!tagsElement.getTextTrim().isEmpty()) {
                    tagsElement.addContent(", ");
                }
                if (tag.getWeight() == null) {
                    tagsElement.addContent(tag.getName());
                } else {
                    // Weighted tags are serialized as "name:weight".
                    tagsElement.addContent(tag.getName());
                    tagsElement.addContent(":");
                    tagsElement.addContent(String.valueOf(tag.getWeight()));
                }
            }
            if (!tagsElement.getTextTrim().isEmpty()) {
                communityElement.addContent(tagsElement);
            }
        }
        // Attach the community element only if it ended up with content.
        if (!communityElement.getChildren().isEmpty()) {
            e.addContent(communityElement);
        }
    }
}
public class Graph { /** * Removes the given list of vertices and its edges from the graph . * @ param verticesToBeRemoved the list of vertices to be removed * @ return the resulted graph containing the initial vertices and edges minus the vertices * and edges removed . */ public Graph < K , VV , EV > removeVertices ( List < Vertex < K , VV > > verticesToBeRemoved ) { } }
return removeVertices ( this . context . fromCollection ( verticesToBeRemoved ) ) ;
public class SelectPlan {
    /**
     * Returns a histogram that, for each field, approximates the distribution
     * of values from the specified histogram joining with other fields in the
     * specified group.
     *
     * Assumes that:
     * <ul>
     * <li>Values in a bucket have the same frequency (uniform frequency)</li>
     * <li>Given values within two equal ranges (of two joinable fields), all
     * values in the range having smaller number of values appear in the range
     * having larger number of values</li>
     * <li>Distributions of values in different fields are independent with each
     * other</li>
     * </ul>
     *
     * @param hist the input histogram
     * @param group the group of joining fields
     * @return a histogram that, for each field, approximates the distribution
     *         of values from the specified histogram joining with other fields
     *         in the specified group
     */
    public static Histogram joinFieldsHistogram(Histogram hist, Set<String> group) {
        // A self-join needs at least two fields; otherwise nothing changes.
        if (group.size() < 2)
            return new Histogram(hist);
        List<String> flds = new ArrayList<String>(group);
        // Pairwise-join the buckets of the first field with each remaining
        // field in turn; jfBkts accumulates the joined buckets.
        Collection<Bucket> jfBkts = hist.buckets(flds.get(0));
        for (int i = 1; i < flds.size(); i++) {
            Collection<Bucket> temp = jfBkts;
            jfBkts = new ArrayList<Bucket>(2 * jfBkts.size());
            for (Bucket bkt1 : temp) {
                for (Bucket bkt2 : hist.buckets(flds.get(i))) {
                    // joinFieldBucket returns null when the two buckets do not
                    // produce a joinable range.
                    Bucket jfBkt = joinFieldBucket(bkt1, bkt2, hist.recordsOutput());
                    if (jfBkt != null)
                        jfBkts.add(jfBkt);
                }
            }
        }
        // Total joined frequency; below 1.0 means effectively no joined output.
        double freqSum = 0.0;
        for (Bucket bkt : jfBkts)
            freqSum += bkt.frequency();
        if (Double.compare(freqSum, 1.0) < 0) // no joined bucket
            return new Histogram(hist.fields());
        // Reduction factor applied to the frequencies of non-group fields.
        double jfReduction = freqSum / hist.recordsOutput();
        if (Double.compare(jfReduction, 1.0) == 0)
            return new Histogram(hist);
        Histogram jfHist = new Histogram(hist.fields());
        for (String fld : hist.fields()) {
            if (group.contains(fld))
                // Group fields all share the joined buckets.
                jfHist.setBuckets(fld, jfBkts);
            else {
                // Non-group fields keep their buckets with scaled frequencies;
                // buckets whose surviving frequency drops below 1 are dropped.
                for (Bucket bkt : hist.buckets(fld)) {
                    double restFreq = bkt.frequency() * jfReduction;
                    if (Double.compare(restFreq, 1.0) < 0)
                        continue;
                    // Distinct values can never exceed the surviving frequency.
                    double restDistVals = Math.min(bkt.distinctValues(), restFreq);
                    Bucket restBkt = new Bucket(bkt.valueRange(), restFreq, restDistVals,
                            bkt.valuePercentiles());
                    jfHist.addBucket(fld, restBkt);
                }
            }
        }
        return syncHistogram(jfHist);
    }
}
public class Levenshtein { /** * Returns a new Sorensen - Dice coefficient instance with compare target string and k - shingling * @ see SorensenDice * @ param baseTarget * @ param compareTarget * @ param k * @ return */ @ SuppressWarnings ( "unchecked" ) public static < T extends Levenshtein > T sorensenDice ( String baseTarget , String compareTarget , Integer k ) { } }
return ( T ) new SorensenDice ( baseTarget , k ) . update ( compareTarget ) ;
public class FlowTypeUtils { /** * Given an array of expected record types , determine corresponding expected * field types . For example , consider the following simple Whiley snippet : * < pre > * function f ( int x ) - > { int f } : * return { f : x } * < / pre > * The expected type for the expression < code > { f : x } < / code > is * < code > { int f } < / code > . From this , we calculate the expected type for the * expression < code > x < / code > as < code > int < / code > . * @ param field * @ param expected * @ return */ public static Type [ ] typeRecordFieldConstructor ( Type . Record [ ] types , Identifier fieldName ) { } }
Type [ ] fields = new Type [ types . length ] ; for ( int i = 0 ; i != fields . length ; ++ i ) { Type . Record type = types [ i ] ; Type field = type . getField ( fieldName ) ; if ( field == null ) { if ( type . isOpen ( ) ) { field = Type . Any ; } else { return null ; } } fields [ i ] = field ; } fields = ArrayUtils . removeAll ( fields , null ) ; if ( fields . length == 0 ) { return null ; } else { return fields ; }
public class GoodsAdviseCategories { /** * < p > Setter for pAdviseCategory . < / p > * @ param pAdviseCategory reference */ public final void setAdviseCategory ( final AdviseCategoryOfGs pAdviseCategory ) { } }
this . adviseCategory = pAdviseCategory ; if ( this . itsId == null ) { this . itsId = new GoodsAdviseCategoriesId ( ) ; } this . itsId . setAdviseCategory ( this . adviseCategory ) ;
public class WriterCallbacks { /** * Creates a writer callback that writes some byte array to the target stream . * < p > This writer can be used many times . * @ param data the bytes to write * @ return the writer callback */ public static WriterCallback from ( final byte [ ] data ) { } }
return new WriterCallback ( ) { @ Override public void write ( OutputStream os ) throws IOException { os . write ( data ) ; } } ;
public class ExpressionTree {
    /**
     * Processes the expression tree, including sub expressions, and returns the
     * results.
     *
     * TODO (cl) - More tests around indices, etc. This can likely be cleaned up.
     *
     * @param query_results The result set to pass to the expressions
     * @return The result set, or an exception will bubble up if something
     *         wasn't configured properly.
     */
    public DataPoints[] evaluate(final List<DataPoints[]> query_results) {
        // TODO - size the array
        final List<DataPoints[]> materialized = Lists.newArrayList();
        // Sorted keys of the metric sub-queries; null when there are none.
        List<Integer> metric_query_keys = null;
        if (sub_metric_queries != null && sub_metric_queries.size() > 0) {
            metric_query_keys = Lists.newArrayList(sub_metric_queries.keySet());
            Collections.sort(metric_query_keys);
        }
        // Cursors into the metric keys and sub-expression lists; parameters are
        // consumed in parameter_index order.
        int metric_pointer = 0;
        int sub_expression_pointer = 0;
        for (int i = 0; i < parameter_index.size(); i++) {
            final Parameter param = parameter_index.get(i);
            if (param == Parameter.METRIC_QUERY) {
                if (metric_query_keys == null) {
                    throw new RuntimeException("Attempt to read metric "
                            + "results when none exist");
                }
                // Map the next metric key to its slot in the incoming results.
                final int ix = metric_query_keys.get(metric_pointer++);
                materialized.add(query_results.get(ix));
            } else if (param == Parameter.SUB_EXPRESSION) {
                // Recursively evaluate the nested expression tree.
                final ExpressionTree st = sub_expressions.get(sub_expression_pointer++);
                materialized.add(st.evaluate(query_results));
            } else {
                throw new IllegalDataException("Unknown parameter type: " + param
                        + " in tree: " + this);
            }
        }
        // Hand the materialized operands to the expression implementation.
        return expression.evaluate(data_query, materialized, func_params);
    }
}
public class XQuery { /** * Converts a { @ link NamedNodeMap } to a standard { @ link Map } of attributes . * @ param nnm { @ link NamedNodeMap } to convert * @ return { @ link Map } of attributes and their values */ private static @ Nonnull Map < String , String > attributesToMap ( NamedNodeMap nnm ) { } }
return IntStream . range ( 0 , nnm . getLength ( ) ) . mapToObj ( nnm :: item ) . collect ( toMap ( Node :: getNodeName , Node :: getNodeValue ) ) ;
public class LocalityPreservingCooccurrenceSpace {
    /**
     * Returns the index in the co-occurrence matrix for this word. If the word
     * was not previously assigned an index, this method adds one for it and
     * returns that index.
     *
     * Uses check-then-lock-then-recheck so the common case (word already
     * indexed) avoids synchronization entirely.
     */
    private final int getIndexFor(String word) {
        Integer index = termToIndex.get(word);
        if (index == null) {
            synchronized (this) {
                // recheck to see if the term was added while blocking
                index = termToIndex.get(word);
                // if another thread has not already added this word while the
                // current thread was blocking waiting on the lock, then add it.
                if (index == null) {
                    int i = wordIndexCounter++;
                    termToIndex.put(word, i);
                    return i; // avoid the auto-boxing to assign i to index
                }
            }
        }
        return index;
    }
}
public class TableRef { /** * Updates the provision type and provision load of the referenced table . * < pre > * StorageRef storage = new StorageRef ( " your _ app _ key " , " your _ token " ) ; * TableRef tableRef = storage . table ( " your _ table " ) ; * / / change ProvisionType * / / Note : you can ' t change ProvisionType and ProvisionLoad at the same time * tableRef . update ( StorageRef . StorageProvisionLoad . READ , StorageRef . StorageProvisionType . MEDIUM , new OnTableUpdate ( ) { * & # 064 ; Override * public void run ( String tableName , String status ) { * Log . d ( " TableRef " , " Table : " + tableName + " , status : " + status ) ; * } , new OnError ( ) { * & # 064 ; Override * public void run ( Integer integer , String errorMessage ) { * Log . e ( " TableRef " , " Error updating table : " + errorMessage ) ; * < / pre > * @ param provisionLoad * The new provision load * @ param provisionType * The new provision type * @ param onTableUpdate * The callback to run once the table is updated * @ param onError * The callback to call if an exception occurred * @ return Current table reference */ public TableRef update ( final StorageProvisionLoad provisionLoad , final StorageProvisionType provisionType , final OnTableUpdate onTableUpdate , final OnError onError ) { } }
TableMetadata tm = context . getTableMeta ( this . name ) ; if ( tm == null ) { this . meta ( new OnTableMetadata ( ) { @ Override public void run ( TableMetadata tableMetadata ) { _update ( provisionLoad , provisionType , onTableUpdate , onError ) ; } } , onError ) ; } else { this . _update ( provisionLoad , provisionType , onTableUpdate , onError ) ; } return this ;
public class ServicesProcessDataEventListener { /** * helper methods */ private Integer getInteger ( String value ) { } }
int priority = 0 ; if ( value != null ) { try { priority = new Integer ( value ) ; } catch ( NumberFormatException e ) { // do nothing } } return priority ;
public class AdaptiveTableLayout {
    /**
     * Refresh current column header view holder.
     *
     * Recomputes the header cell's absolute position (accounting for scroll,
     * margins, RTL, and drag state), lays out drag shadows when the holder is
     * being dragged, and positions the headers' bottom shadow when no column
     * drag is in progress.
     *
     * @param holder current view holder
     */
    private void refreshHeaderColumnViewHolder(ViewHolder holder) {
        // Base left edge: empty space plus accumulated width of preceding columns.
        int left = getEmptySpace() + mManager.getColumnsWidth(0, Math.max(0, holder.getColumnIndex()));
        if (!isRTL()) {
            left += mManager.getHeaderRowWidth();
        }
        // Fixed headers pin to the top; otherwise they scroll with content.
        int top = mSettings.isHeaderFixed() ? 0 : -mState.getScrollY();
        View view = holder.getItemView();
        int leftMargin = holder.getColumnIndex() * mSettings.getCellMargin() + mSettings.getCellMargin();
        int topMargin = holder.getRowIndex() * mSettings.getCellMargin() + mSettings.getCellMargin();
        if (holder.isDragging() && mDragAndDropPoints.getOffset().x > 0) {
            // While dragging, center the view horizontally on the drag offset.
            left = mState.getScrollX() + mDragAndDropPoints.getOffset().x - view.getWidth() / 2;
            view.bringToFront();
        }
        if (holder.isDragging()) {
            // Position the left/right drag shadows around the dragged column.
            View leftShadow = mShadowHelper.getLeftShadow();
            View rightShadow = mShadowHelper.getRightShadow();
            if (leftShadow != null) {
                int shadowLeft = left - mState.getScrollX();
                leftShadow.layout(
                        Math.max(mManager.getHeaderRowWidth() - mState.getScrollX(), shadowLeft - SHADOW_THICK)
                                + leftMargin,
                        0, shadowLeft + leftMargin, mSettings.getLayoutHeight());
                leftShadow.bringToFront();
            }
            if (rightShadow != null) {
                int shadowLeft = left + mManager.getColumnWidth(holder.getColumnIndex()) - mState.getScrollX();
                rightShadow.layout(
                        Math.max(mManager.getHeaderRowWidth() - mState.getScrollX(), shadowLeft) + leftMargin,
                        0, shadowLeft + SHADOW_THICK + leftMargin, mSettings.getLayoutHeight());
                rightShadow.bringToFront();
            }
        }
        // Final on-screen rectangle of the header cell.
        int viewPosLeft = left - mState.getScrollX() + leftMargin;
        int viewPosRight = viewPosLeft + mManager.getColumnWidth(holder.getColumnIndex());
        int viewPosTop = top + topMargin;
        int viewPosBottom = viewPosTop + mManager.getHeaderColumnHeight();
        //noinspection ResourceType
        view.layout(viewPosLeft, viewPosTop, viewPosRight, viewPosBottom);
        if (mState.isRowDragging()) {
            view.bringToFront();
        }
        if (!mState.isColumnDragging()) {
            // Shadow under the column headers; lazily created on first use.
            View shadow = mShadowHelper.getColumnsHeadersShadow();
            if (shadow == null) {
                shadow = mShadowHelper.addColumnsHeadersShadow(this);
            }
            //noinspection ResourceType
            shadow.layout(
                    mState.isRowDragging() ? 0 : mSettings.isHeaderFixed() ? 0 : -mState.getScrollX(),
                    top + mManager.getHeaderColumnHeight(),
                    mSettings.getLayoutWidth(),
                    top + mManager.getHeaderColumnHeight() + SHADOW_HEADERS_THICK);
            shadow.bringToFront();
        }
    }
}
public class BrowserProxyQueueImpl {
    /**
     * Closes the proxy queue.
     *
     * Idempotent: the close sequence (close session, purge queue, notify the
     * owning group) runs only once; subsequent calls are no-ops.
     *
     * @see com.ibm.ws.sib.comms.client.proxyqueue.BrowserProxyQueue#close()
     */
    public void close() throws SIResourceException, SIConnectionLostException, SIErrorException,
            SIConnectionDroppedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "close");
        if (!closed) {
            // begin D249096
            // Order matters here: tear down the session, purge pending items,
            // then inform the owning group before marking closed.
            convHelper.closeSession();
            queue.purge(proxyQueueId);
            owningGroup.notifyClose(this);
            closed = true;
            // end D249096
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "close");
    }
}
public class DerbyDatabase { /** * { @ inheritDoc } */ @ Override public DerbyDatabase createTable ( final Connection _con , final String _table /* final String _ parentTable */ ) throws SQLException { } }
final Statement stmt = _con . createStatement ( ) ; try { // create table itself final StringBuilder cmd = new StringBuilder ( ) . append ( "create table " ) . append ( _table ) . append ( " (" ) . append ( " ID bigint not null" ) ; /* TODO / / auto increment if ( _ parentTable = = null ) { cmd . append ( " generated always as identity ( start with 1 , increment by 1 ) " ) ; */ cmd . append ( "," ) . append ( " constraint " ) . append ( _table ) . append ( "_UK_ID unique(ID)" ) ; /* TODO / / foreign key to parent sql table if ( _ parentTable ! = null ) { cmd . append ( " , " ) . append ( " constraint " ) . append ( _ table ) . append ( " _ FK _ ID " ) . append ( " foreign key ( ID ) " ) . append ( " references " ) . append ( _ parentTable ) . append ( " ( ID ) " ) ; */ cmd . append ( ")" ) ; stmt . executeUpdate ( cmd . toString ( ) ) ; } finally { stmt . close ( ) ; } return this ;
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the {@code IfcCoveringTypeEnum} EEnum, lazily resolving it from
     * the registered package's classifier list (index 804) on first access.
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcCoveringTypeEnum() {
        if (ifcCoveringTypeEnumEEnum == null) {
            // Lazily resolve and cache the classifier from the global registry.
            ifcCoveringTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(804);
        }
        return ifcCoveringTypeEnumEEnum;
    }
}
public class ImportExportPortletController { /** * Display the entity export form view . * @ param request * @ return */ @ RequestMapping ( params = "action=export" ) public ModelAndView getExportView ( PortletRequest request ) { } }
Map < String , Object > model = new HashMap < String , Object > ( ) ; // add a list of all permitted export types final Iterable < IPortalDataType > exportPortalDataTypes = this . portalDataHandlerService . getExportPortalDataTypes ( ) ; final List < IPortalDataType > types = getAllowedTypes ( request , IPermission . EXPORT_ACTIVITY , exportPortalDataTypes ) ; model . put ( "supportedTypes" , types ) ; return new ModelAndView ( "/jsp/ImportExportPortlet/export" , model ) ;
public class WordShapeClassifier {
    /**
     * Returns true if the specified word shaper doesn't use known lower case
     * words, even if a list of them is present. This is used for backwards
     * compatibility. It is suggested that new word shape functions are either
     * passed a non-null list of lowercase words or not, depending on whether
     * you want knownLC marking (if it is available in a shaper). This is how
     * chris4 works.
     *
     * @param shape One of the defined shape constants
     * @return true if the specified word shaper does NOT use known lower case
     *         words (fixed: previously this contradicted the method's name and
     *         description)
     */
    private static boolean dontUseLC(int shape) {
        return shape == WORDSHAPEDAN2 || shape == WORDSHAPEDAN2BIO || shape == WORDSHAPEJENNY1
                || shape == WORDSHAPECHRIS2 || shape == WORDSHAPECHRIS3;
    }
}
public class OracleDdlParser {
    /**
     * Parses DDL CREATE PROCEDURE statement.
     *
     * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the parsed CREATE PROCEDURE statement node
     * @throws ParsingException
     */
    protected AstNode parseCreateProcedureStatement(DdlTokenStream tokens, AstNode parentNode)
            throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        markStartOfStatement(tokens);
        /*
        CREATE [ OR REPLACE ] PROCEDURE [ schema. ] procedure_name
            [ ( parameter_declaration [, parameter_declaration ] ) ]
            [ AUTHID { CURRENT_USER | DEFINER ]
            { IS | AS } { [ declare_section ] body | call_spec | EXTERNAL } ;

        call_spec = LANGUAGE { Java_declaration | C_declaration }
        Java_declaration = JAVA NAME string
        C_declaration =
            C [ NAME name ] LIBRARY lib_name [ AGENT IN ( argument [, argument ]... ) ]
              [ WITH CONTEXT ] [ PARAMETERS ( parameter [, parameter ]... ) ]
        parameter_declaration =
            parameter_name [ IN | { { OUT | { IN OUT } } [ NOCOPY ] } ] datatype
              [ { := | DEFAULT } expression ]
        */
        // Accept either "CREATE OR REPLACE PROCEDURE" or plain "CREATE PROCEDURE".
        boolean isReplace = tokens.canConsume(STMT_CREATE_OR_REPLACE_PROCEDURE);
        tokens.canConsume(STMT_CREATE_PROCEDURE);
        String name = parseName(tokens);
        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_PROCEDURE_STATEMENT);
        if (isReplace) {
            // TODO: SET isReplace = TRUE to node (possibly a cnd mixin of "replaceable")
        }
        boolean ok = parseParameters(tokens, node);
        if (ok) {
            // Optional AUTHID clause selects the invoker-rights model.
            if (tokens.canConsume("AUTHID")) {
                if (tokens.canConsume("CURRENT_USER")) {
                    node.setProperty(AUTHID_VALUE, "AUTHID CURRENT_USER");
                } else {
                    tokens.consume("DEFINER");
                    node.setProperty(AUTHID_VALUE, "DEFINER");
                }
            }
        }
        // The procedure body runs until the terminating forward slash.
        parseUntilFwdSlash(tokens, false);
        tokens.canConsume("/");
        markEndOfStatement(tokens, node);
        return node;
    }
}
public class OgmTableGenerator { /** * Determine the name of the column used to indicate the segment for each * row . This column acts as the primary key . * Called during { @ link # configure configuration } . * @ param params The params supplied in the generator config ( plus some standard useful extras ) . * @ param dialect The dialect in effect * @ return The name of the segment column * @ see # getSegmentColumnName ( ) */ protected String determineSegmentColumnName ( Properties params , Dialect dialect ) { } }
ObjectNameNormalizer normalizer = ( ObjectNameNormalizer ) params . get ( PersistentIdentifierGenerator . IDENTIFIER_NORMALIZER ) ; String name = ConfigurationHelper . getString ( SEGMENT_COLUMN_PARAM , params , DEF_SEGMENT_COLUMN ) ; return normalizer . toDatabaseIdentifierText ( name ) ;
public class AlphaPartitionedIndex { /** * / * ( non - Javadoc ) * @ see org . archive . wayback . ResourceIndex # query ( org . archive . wayback . core . WaybackRequest ) */ public SearchResults query ( WaybackRequest wbRequest ) throws ResourceIndexNotAvailableException , ResourceNotInArchiveException , BadQueryException , AccessControlException { } }
RangeGroup group = getRangeGroupForRequest ( wbRequest ) ; return group . query ( wbRequest ) ;
public class Damages { /** * Set the minimum damage value . Max set to min value if over . * @ param min The minimum damage value ( must be positive ) . */ public void setMin ( int min ) { } }
this . min = UtilMath . clamp ( min , 0 , Integer . MAX_VALUE ) ; max = UtilMath . clamp ( max , this . min , Integer . MAX_VALUE ) ;
public class ExtrasPatternParser {
    /**
     * Parse a format specifier.
     *
     * Implemented as a character-by-character state machine over the pattern:
     * LITERAL -> (on '%') CONVERTER -> MIN -> DOT -> MAX, emitting a pattern
     * converter (plus its field/formatting info) each time a converter spec is
     * finalized, and a literal converter for any accumulated literal text.
     *
     * @param pattern pattern to parse.
     * @param patternConverters list to receive pattern converters.
     * @param formattingInfos list to receive field specifiers corresponding to pattern converters.
     * @param converterRegistry map of user-supported pattern converters keyed by format specifier, may be null.
     * @param rules map of stock pattern converters keyed by format specifier.
     */
    public static void parse(final String pattern, final List patternConverters, final List formattingInfos,
            final Map converterRegistry, final Map rules) {
        if (pattern == null) {
            throw new NullPointerException("pattern");
        }
        StringBuffer currentLiteral = new StringBuffer(32);
        int patternLength = pattern.length();
        int state = LITERAL_STATE;
        char c;
        int i = 0;
        ExtrasFormattingInfo formattingInfo = ExtrasFormattingInfo.getDefault();
        while (i < patternLength) {
            c = pattern.charAt(i++);
            switch (state) {
            case LITERAL_STATE:
                // In literal state, the last char is always a literal.
                if (i == patternLength) {
                    currentLiteral.append(c);
                    continue;
                }
                if (c == ESCAPE_CHAR) {
                    // peek at the next char.
                    switch (pattern.charAt(i)) {
                    case ESCAPE_CHAR:
                        // "%%" is an escaped literal percent sign.
                        currentLiteral.append(c);
                        i++; // move pointer
                        break;
                    default:
                        // Flush any accumulated literal text before entering
                        // the converter specification.
                        if (currentLiteral.length() != 0) {
                            patternConverters.add(new LiteralPatternConverter(currentLiteral.toString()));
                            formattingInfos.add(ExtrasFormattingInfo.getDefault());
                        }
                        currentLiteral.setLength(0);
                        currentLiteral.append(c); // append %
                        state = CONVERTER_STATE;
                        formattingInfo = ExtrasFormattingInfo.getDefault();
                    }
                } else {
                    currentLiteral.append(c);
                }
                break;
            case CONVERTER_STATE:
                currentLiteral.append(c);
                switch (c) {
                case '-':
                    // '-' flag: left-align the field.
                    formattingInfo = new ExtrasFormattingInfo(true, formattingInfo.isRightTruncated(),
                            formattingInfo.getMinLength(), formattingInfo.getMaxLength());
                    break;
                case '!':
                    // '!' flag: truncate on the right.
                    formattingInfo = new ExtrasFormattingInfo(formattingInfo.isLeftAligned(), true,
                            formattingInfo.getMinLength(), formattingInfo.getMaxLength());
                    break;
                case '.':
                    state = DOT_STATE;
                    break;
                default:
                    if ((c >= '0') && (c <= '9')) {
                        // First digit of the minimum field width.
                        formattingInfo = new ExtrasFormattingInfo(formattingInfo.isLeftAligned(),
                                formattingInfo.isRightTruncated(), c - '0', formattingInfo.getMaxLength());
                        state = MIN_STATE;
                    } else {
                        i = finalizeConverter(c, pattern, i, currentLiteral, formattingInfo,
                                converterRegistry, rules, patternConverters, formattingInfos);
                        // Next pattern is assumed to be a literal.
                        state = LITERAL_STATE;
                        formattingInfo = ExtrasFormattingInfo.getDefault();
                        currentLiteral.setLength(0);
                    }
                } // switch
                break;
            case MIN_STATE:
                currentLiteral.append(c);
                if ((c >= '0') && (c <= '9')) {
                    // Accumulate further digits of the minimum width.
                    formattingInfo = new ExtrasFormattingInfo(formattingInfo.isLeftAligned(),
                            formattingInfo.isRightTruncated(),
                            (formattingInfo.getMinLength() * 10) + (c - '0'), formattingInfo.getMaxLength());
                } else if (c == '.') {
                    state = DOT_STATE;
                } else {
                    i = finalizeConverter(c, pattern, i, currentLiteral, formattingInfo,
                            converterRegistry, rules, patternConverters, formattingInfos);
                    state = LITERAL_STATE;
                    formattingInfo = ExtrasFormattingInfo.getDefault();
                    currentLiteral.setLength(0);
                }
                break;
            case DOT_STATE:
                currentLiteral.append(c);
                if ((c >= '0') && (c <= '9')) {
                    // First digit of the maximum field width.
                    formattingInfo = new ExtrasFormattingInfo(formattingInfo.isLeftAligned(),
                            formattingInfo.isRightTruncated(), formattingInfo.getMinLength(), c - '0');
                    state = MAX_STATE;
                } else {
                    // A '.' must be followed by a digit; report and recover.
                    LogLog.error("Error occured in position " + i
                            + ".\n Was expecting digit, instead got char \"" + c + "\".");
                    state = LITERAL_STATE;
                }
                break;
            case MAX_STATE:
                currentLiteral.append(c);
                if ((c >= '0') && (c <= '9')) {
                    // Accumulate further digits of the maximum width.
                    formattingInfo = new ExtrasFormattingInfo(formattingInfo.isLeftAligned(),
                            formattingInfo.isRightTruncated(), formattingInfo.getMinLength(),
                            (formattingInfo.getMaxLength() * 10) + (c - '0'));
                } else {
                    i = finalizeConverter(c, pattern, i, currentLiteral, formattingInfo,
                            converterRegistry, rules, patternConverters, formattingInfos);
                    state = LITERAL_STATE;
                    formattingInfo = ExtrasFormattingInfo.getDefault();
                    currentLiteral.setLength(0);
                }
                break;
            } // switch
        } // while
        // Flush any trailing literal text.
        if (currentLiteral.length() != 0) {
            patternConverters.add(new LiteralPatternConverter(currentLiteral.toString()));
            formattingInfos.add(ExtrasFormattingInfo.getDefault());
        }
    }
}
public class WebApplicationHandler {
    /**
     * Builds (or retrieves from cache) the filter chain applying to the given
     * request path and target servlet.
     *
     * Path-mapped filters are collected first, then servlet-name-mapped
     * filters. When chain caching is enabled the chain (possibly null, meaning
     * "no filters apply") is cached per request type and path.
     *
     * NOTE(review): the initial cache lookup and the later cache store are two
     * separate synchronized sections, so concurrent callers may build the same
     * chain twice before one wins the cache — appears benign but worth
     * confirming.
     */
    private FilterChain getChainForPath(int requestType, String pathInContext, ServletHolder servletHolder) {
        if (_filterChainsCached) {
            synchronized (this) {
                // containsKey is checked (not just get) because a cached null
                // chain is a valid "no filters" answer.
                if (_chainCache[requestType].containsKey(pathInContext))
                    return (FilterChain) _chainCache[requestType].get(pathInContext);
            }
        }
        // Build list of filters
        Object filters = null;
        // Path filters
        for (int i = 0; i < _pathFilters.size(); i++) {
            FilterMapping mapping = (FilterMapping) _pathFilters.get(i);
            if (mapping.appliesTo(pathInContext, requestType))
                filters = LazyList.add(filters, mapping.getHolder());
        }
        // Servlet filters
        if (servletHolder != null && _servletFilterMap.size() > 0) {
            Object o = _servletFilterMap.get(servletHolder.getName());
            for (int i = 0; i < LazyList.size(o); i++) {
                FilterMapping mapping = (FilterMapping) LazyList.get(o, i);
                // null path: matching is by servlet name, not by path.
                if (mapping.appliesTo(null, requestType))
                    filters = LazyList.add(filters, mapping.getHolder());
            }
        }
        FilterChain chain = null;
        if (_filterChainsCached) {
            synchronized (this) {
                if (LazyList.size(filters) > 0)
                    chain = new CachedChain(filters, servletHolder);
                // Cache even a null chain so the lookup short-circuits next time.
                _chainCache[requestType].put(pathInContext, chain);
            }
        } else if (LazyList.size(filters) > 0)
            chain = new Chain(filters, servletHolder);
        return chain;
    }
}
public class ComputeNodesImpl {
    /**
     * Restarts the specified compute node.
     * You can restart a node only if it is in an idle or running state.
     *
     * @param poolId The ID of the pool that contains the compute node.
     * @param nodeId The ID of the compute node that you want to restart.
     * @param nodeRebootOption When to reboot the compute node and what to do with currently running tasks. The default value is requeue. Possible values include: 'requeue', 'terminate', 'taskCompletion', 'retainedData'
     * @param computeNodeRebootOptions Additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws BatchErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void reboot(String poolId, String nodeId, ComputeNodeRebootOption nodeRebootOption,
            ComputeNodeRebootOptions computeNodeRebootOptions) {
        // Synchronously wait on the async call; body() forces evaluation of the
        // service response (and surfaces any service error). The return value is
        // intentionally discarded — this overload is fire-and-wait.
        rebootWithServiceResponseAsync(poolId, nodeId, nodeRebootOption, computeNodeRebootOptions)
                .toBlocking().single().body();
    }
}
public class DrawerBuilder {
    /**
     * Call this method to append a new DrawerBuilder to an existing Drawer.
     *
     * The slider layout is inflated into the existing drawer's DrawerLayout,
     * positioned according to the configured gravity, and a new Drawer result
     * object is returned. The builder is single-use and the Activity reference
     * is released at the end.
     *
     * @param result the Drawer.Result of an existing Drawer
     * @return the appended Drawer
     */
    public Drawer append(@NonNull Drawer result) {
        // A builder may only be consumed once, and appending requires a gravity.
        if (mUsed) {
            throw new RuntimeException("you must not reuse a DrawerBuilder builder");
        }
        if (mDrawerGravity == null) {
            throw new RuntimeException("please set the gravity for the drawer");
        }
        // set that this builder was used. now you have to create a new one
        mUsed = true;
        mAppended = true;
        // get the drawer layout from the previous drawer
        mDrawerLayout = result.getDrawerLayout();
        // get the slider view
        mSliderLayout = (ScrimInsetsRelativeLayout) mActivity.getLayoutInflater()
                .inflate(R.layout.material_drawer_slider, mDrawerLayout, false);
        mSliderLayout.setBackgroundColor(UIUtils.getThemeColorFromAttrOrRes(mActivity,
                R.attr.material_drawer_background, R.color.material_drawer_background));
        // get the layout params
        DrawerLayout.LayoutParams params = (DrawerLayout.LayoutParams) mSliderLayout.getLayoutParams();
        // set the gravity of this drawerGravity
        params.gravity = mDrawerGravity;
        // if this is a drawer from the right, change the margins :D
        params = DrawerUtils.processDrawerLayoutParams(this, params);
        // set the new params
        mSliderLayout.setLayoutParams(params);
        // define id for the sliderLayout
        mSliderLayout.setId(R.id.material_drawer_slider_layout);
        // add the slider to the drawer
        mDrawerLayout.addView(mSliderLayout, 1);
        // create the content
        createContent();
        // create the result object
        Drawer appendedResult = new Drawer(this);
        // toggle selection list if we were previously on the account list
        if (mSavedInstance != null
                && mSavedInstance.getBoolean(Drawer.BUNDLE_DRAWER_CONTENT_SWITCHED_APPENDED, false)) {
            mAccountHeader.toggleSelectionList(mActivity);
        }
        // forget the reference to the activity
        mActivity = null;
        return appendedResult;
    }
}
public class hqlParser {
    /**
     * hql.g:564:1: whenClause : ( WHEN ^ logicalExpression THEN ! expression ) ;
     *
     * ANTLR-generated rule: parses a CASE "WHEN ... THEN ..." clause, making
     * WHEN the tree root (^) and dropping the THEN token (!).
     */
    public final hqlParser.whenClause_return whenClause() throws RecognitionException {
        hqlParser.whenClause_return retval = new hqlParser.whenClause_return();
        retval.start = input.LT(1);
        CommonTree root_0 = null;
        Token WHEN216 = null;
        Token THEN218 = null;
        ParserRuleReturnScope logicalExpression217 = null;
        ParserRuleReturnScope expression219 = null;
        CommonTree WHEN216_tree = null;
        CommonTree THEN218_tree = null;
        try {
            // hql.g:565:2: ( ( WHEN ^ logicalExpression THEN ! expression ) )
            // hql.g:565:4: ( WHEN ^ logicalExpression THEN ! expression )
            {
                root_0 = (CommonTree) adaptor.nil();
                // hql.g:565:4: ( WHEN ^ logicalExpression THEN ! expression )
                // hql.g:565:5: WHEN ^ logicalExpression THEN ! expression
                {
                    // WHEN becomes the root of the subtree.
                    WHEN216 = (Token) match(input, WHEN, FOLLOW_WHEN_in_whenClause2651);
                    WHEN216_tree = (CommonTree) adaptor.create(WHEN216);
                    root_0 = (CommonTree) adaptor.becomeRoot(WHEN216_tree, root_0);
                    pushFollow(FOLLOW_logicalExpression_in_whenClause2654);
                    logicalExpression217 = logicalExpression();
                    state._fsp--;
                    adaptor.addChild(root_0, logicalExpression217.getTree());
                    // THEN is matched but not added to the tree (the '!' suffix).
                    THEN218 = (Token) match(input, THEN, FOLLOW_THEN_in_whenClause2656);
                    pushFollow(FOLLOW_expression_in_whenClause2659);
                    expression219 = expression();
                    state._fsp--;
                    adaptor.addChild(root_0, expression219.getTree());
                }
            }
            retval.stop = input.LT(-1);
            retval.tree = (CommonTree) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        } catch (RecognitionException re) {
            // On a parse error, report, resync, and return an error node.
            reportError(re);
            recover(input, re);
            retval.tree = (CommonTree) adaptor.errorNode(input, retval.start, input.LT(-1), re);
        } finally {
            // do for sure before leaving
        }
        return retval;
    }
}
public class MappedField { /** * Gets the value of the field mapped on the instance given . * @ param instance the instance to use * @ return the value stored in the java field */ public Object getFieldValue ( final Object instance ) { } }
try { return field . get ( instance ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( e ) ; }
public class CmsInlineEntityWidget {

    /**
     * Initializes the button styling.<p>
     *
     * Wires the add/edit/remove/move buttons of the inline widget: registers a
     * choice entry and a click handler on the add button, assigns font-icon
     * styles to the remaining buttons and, when the editor message dictionary is
     * available, localizes the button tool-tips using the attribute label.
     */
    private void initButtons() {

        // Register a choice entry for this attribute on the add button. Both callbacks are
        // deliberately no-ops: failure is never reported and the selection needs no follow-up here.
        m_addButton.addChoice(
            m_attributeHandler.getWidgetService(),
            new CmsChoiceMenuEntryBean(m_attributeHandler.getAttributeName()),
            new AsyncCallback<CmsChoiceMenuEntryBean>() {

                public void onFailure(Throwable caught) {

                    // will not be called
                }

                public void onSuccess(CmsChoiceMenuEntryBean selectedEntry) {

                    // nothing to do
                }
            });
        // Clicking "add" hides the button menu and appends a new attribute value;
        // the event is consumed so it does not bubble up to surrounding handlers.
        m_addButton.addDomHandler(new ClickHandler() {

            public void onClick(ClickEvent event) {

                m_addButton.hide();
                addNewAttributeValue();
                event.preventDefault();
                event.stopPropagation();
            }
        }, ClickEvent.getType());
        // Plain font icons (no extra button chrome) for edit/remove/move buttons.
        m_editButton.setImageClass(I_CmsButton.PEN_SMALL);
        m_editButton.setButtonStyle(ButtonStyle.FONT_ICON, null);
        m_removeButton.setImageClass(I_CmsButton.CUT_SMALL);
        m_removeButton.setButtonStyle(ButtonStyle.FONT_ICON, null);
        m_upButton.setImageClass(I_CmsButton.EDIT_UP_SMALL);
        m_upButton.setButtonStyle(ButtonStyle.FONT_ICON, null);
        m_downButton.setImageClass(I_CmsButton.EDIT_DOWN_SMALL);
        m_downButton.setButtonStyle(ButtonStyle.FONT_ICON, null);
        // Localized tool-tips can only be set once the editor dictionary has been loaded.
        if (CmsEditorBase.hasDictionary()) {
            String label = m_widgetService.getAttributeLabel(m_attributeHandler.getAttributeName());
            m_addButton.setTitle(CmsEditorBase.getMessageForKey(CmsEditorBase.GUI_VIEW_ADD_1, label));
            m_removeButton.setTitle(CmsEditorBase.getMessageForKey(CmsEditorBase.GUI_VIEW_DELETE_1, label));
            m_upButton.setTitle(CmsEditorBase.getMessageForKey(CmsEditorBase.GUI_VIEW_MOVE_UP_0));
            m_downButton.setTitle(CmsEditorBase.getMessageForKey(CmsEditorBase.GUI_VIEW_MOVE_DOWN_0));
            // The edit title is cached in a field so it can be reused elsewhere in the widget.
            m_title = CmsEditorBase.getMessageForKey(CmsEditorBase.GUI_VIEW_EDIT_1, label);
            m_editButton.setTitle(m_title);
        }
    }
}
public class AttributeMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * Writes each field of the given {@code Attribute} to the protocol marshaller
     * using the generated field bindings; the emission order follows the service
     * model. NOTE(review): SDK-generated marshaller — changes belong in the code
     * generator, not here.
     *
     * @param attribute the attribute to marshall; must not be {@code null}
     * @param protocolMarshaller the target marshaller receiving the field values
     * @throws SdkClientException if {@code attribute} is {@code null} or any field fails to marshall
     */
    public void marshall(Attribute attribute, ProtocolMarshaller protocolMarshaller) {

        if (attribute == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(attribute.getType(), TYPE_BINDING);
            protocolMarshaller.marshall(attribute.getScore(), SCORE_BINDING);
            protocolMarshaller.marshall(attribute.getRelationshipScore(), RELATIONSHIPSCORE_BINDING);
            protocolMarshaller.marshall(attribute.getId(), ID_BINDING);
            protocolMarshaller.marshall(attribute.getBeginOffset(), BEGINOFFSET_BINDING);
            protocolMarshaller.marshall(attribute.getEndOffset(), ENDOFFSET_BINDING);
            protocolMarshaller.marshall(attribute.getText(), TEXT_BINDING);
            protocolMarshaller.marshall(attribute.getTraits(), TRAITS_BINDING);
        } catch (Exception e) {
            // Any marshalling failure is surfaced as a client-side exception carrying the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class RESTCatalog { /** * - - - - - Private methods */ private void addOwner ( UNode ownerNode ) { } }
String ownerName = ownerNode . getName ( ) ; SortedMap < String , RESTCommand > ownerMap = new TreeMap < > ( ) ; m_cmdsByOwnerMap . put ( ownerName , ownerMap ) ; for ( UNode cmdNode : ownerNode . getMemberList ( ) ) { RESTCommand cmd = RESTCommand . fromUNode ( cmdNode ) ; ownerMap . put ( cmd . getName ( ) , cmd ) ; }
public class PoolDisableAutoScaleHeaders { /** * Set the time at which the resource was last modified . * @ param lastModified the lastModified value to set * @ return the PoolDisableAutoScaleHeaders object itself . */ public PoolDisableAutoScaleHeaders withLastModified ( DateTime lastModified ) { } }
if ( lastModified == null ) { this . lastModified = null ; } else { this . lastModified = new DateTimeRfc1123 ( lastModified ) ; } return this ;