signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class XsdAsmAttributes {
    /**
     * Generates a fluent-API method that adds a given {@link XsdAttribute} to an element.
     *
     * @param classWriter the ASM {@link ClassWriter} of the class/interface receiving the method
     * @param elementAttribute the {@link XsdAttribute} describing the attribute to generate a setter for
     * @param returnType the generated method's return type descriptor; differs depending on whether the
     *                   target is a class or an interface
     * @param className the name of the class where the method is being added
     * @param apiName the name of the generated fluent interface
     */
    static void generateMethodsForAttribute ( ClassWriter classWriter , XsdAttribute elementAttribute , String returnType , String className , String apiName ) { } }

// --- implementation body for generateMethodsForAttribute (stored as a separate
// --- "implementation" column in this dataset row; code tokens unchanged) ---

// Derive the generated method name ("attrXxx" lowered to camelCase) and the
// internal type names used in the emitted bytecode.
String attributeName = ATTRIBUTE_PREFIX + getCleanName(elementAttribute);
String camelCaseName = attributeName.toLowerCase().charAt(0) + attributeName.substring(1);
String attributeClassType = getFullClassTypeName(getAttributeName(elementAttribute), apiName);
String attributeGroupInterfaceType = getFullClassTypeName(className, apiName);
boolean isInterfaceMethod = isInterfaceMethod(returnType);
// NOTE(review): attrName duplicates the camelCaseName derivation via firstToLower —
// presumably equivalent; confirm firstToLower matches the toLowerCase().charAt(0) form.
String attrName = firstToLower(ATTRIBUTE_PREFIX + getCleanName(elementAttribute));
String javaType = getFullJavaType(elementAttribute);
String containingType;
String signature;
// Enum-restricted attributes take the generated enum type instead of the raw Java type.
if (attributeHasEnum(elementAttribute)) {
    javaType = getFullClassTypeNameDesc(getEnumName(elementAttribute), apiName);
}
containingType = javaType;
// Generic signature: interfaces return the self type TT; classes return the raw
// returnType re-parameterized with <TZ;> (trailing ';' stripped first).
if (isInterfaceMethod) {
    signature = "(" + javaType + ")TT;";
} else {
    signature = "(" + javaType + ")" + returnType.substring(0, returnType.length() - 1) + "<TZ;>;";
}
int access = isInterfaceMethod ? ACC_PUBLIC : ACC_PUBLIC + ACC_FINAL;
MethodVisitor mVisitor = classWriter.visitMethod(access, camelCaseName, "(" + javaType + ")" + returnType, signature, null);
mVisitor.visitLocalVariable(attrName, javaType, null, new Label(), new Label(), 1);
mVisitor.visitCode();
// If the schema defines restrictions, emit a static validateRestrictions(value) call first.
if (attributeHasRestrictions(elementAttribute)) {
    mVisitor.visitVarInsn(ALOAD, 1);
    mVisitor.visitMethodInsn(INVOKESTATIC, attributeClassType, "validateRestrictions", "(" + javaType + ")V", false);
}
// Load the visitor: via getVisitor() on interfaces, via the 'visitor' field on classes.
mVisitor.visitVarInsn(ALOAD, 0);
if (isInterfaceMethod) {
    mVisitor.visitMethodInsn(INVOKEINTERFACE, attributeGroupInterfaceType, "getVisitor", "()" + elementVisitorTypeDesc, true);
} else {
    mVisitor.visitFieldInsn(GETFIELD, attributeGroupInterfaceType, "visitor", elementVisitorTypeDesc);
}
// Load the attribute value argument.
mVisitor.visitVarInsn(ALOAD, 1);
// Enum values are unwrapped via getValue() before being passed to the visitor.
if (attributeHasEnum(elementAttribute)) {
    containingType = getEnumContainingType(elementAttribute);
    mVisitor.visitMethodInsn(INVOKEVIRTUAL, javaType.substring(1, javaType.length() - 1), "getValue", "()" + containingType, false);
}
// Non-String values are stringified via Object.toString() — the visitor only takes Strings.
if (containingType != null && !containingType.equals(JAVA_STRING_DESC)) {
    mVisitor.visitMethodInsn(INVOKEVIRTUAL, JAVA_OBJECT, "toString", "()" + JAVA_STRING_DESC, false);
}
mVisitor.visitMethodInsn(INVOKEVIRTUAL, elementVisitorType, "visitAttribute" + getCleanName(elementAttribute.getName()), "(" + JAVA_STRING_DESC + ")V", false);
// Return 'this' (classes fall through to ARETURN directly; interfaces go through self()).
mVisitor.visitVarInsn(ALOAD, 0);
if (isInterfaceMethod) {
    mVisitor.visitMethodInsn(INVOKEINTERFACE, attributeGroupInterfaceType, "self", "()" + elementTypeDesc, true);
}
mVisitor.visitInsn(ARETURN);
mVisitor.visitMaxs(3, 2);
mVisitor.visitEnd();
public class WebGroup {
    /**
     * Resolves the HTTP session context for the given deployed module by
     * delegating to the parent {@code VirtualHost}.
     *
     * @param moduleConfig the deployed module whose session context is requested
     * @param webApp the web application the module belongs to
     * @param listeners listener arrays handed through to the virtual host
     * @return the session context resolved by the parent virtual host
     * @throws Throwable propagated from the parent virtual host
     */
    @SuppressWarnings("unchecked")
    public IHttpSessionContext getSessionContext(com.ibm.ws.container.DeployedModule moduleConfig, WebApp webApp, ArrayList[] listeners) throws Throwable {
        // The parent container is a VirtualHost here; delegate resolution to it.
        final VirtualHost host = (VirtualHost) parent;
        return host.getSessionContext(moduleConfig, webApp, listeners);
    }
}
public class MobileApplication { /** * Gets the appStore value for this MobileApplication . * @ return appStore * The app store the mobile application belongs to . This attribute * is required for * creation and then is read - only . */ public com . google . api . ads . admanager . axis . v201811 . MobileApplicationStore getAppStore ( ) { } }
return appStore ;
public class JspUrlResourceFileSource { /** * / * ( non - Javadoc ) * @ see org . archive . wayback . resourcestore . resourcefile . ResourceFileSource # getBasename ( java . lang . String ) */ public String getBasename ( String path ) { } }
int sepIdx = path . lastIndexOf ( WEB_SEPARATOR_CHAR ) ; if ( sepIdx != - 1 ) { return path . substring ( sepIdx + 1 ) ; } return path ;
public class SmartObject { /** * This protected method allows a subclass to add to the mappers a class type that can be * serialized using mixin class . * @ param serializable The type of class that can be serialized using its toString ( ) method . * @ param mixin The type of class that can be used to serialized the serializable class . */ protected void addSerializableClass ( Class < ? > serializable , Class < ? > mixin ) { } }
safeMapper . addMixIn ( serializable , mixin ) ; fullMapper . addMixIn ( serializable , mixin ) ;
public class StatementParameter { /** * 生成PreparedStatementSetter对象 . * @ return PreparedStatementSetter */ public PreparedStatementSetter getParameters ( ) { } }
if ( list . size ( ) == 0 ) { return null ; } PreparedStatementSetter param = new PreparedStatementSetter ( ) { @ Override public void setValues ( PreparedStatement pstmt ) { try { StatementParameter . this . setValues ( pstmt ) ; } catch ( SQLException e ) { throw new InvalidParamDataAccessException ( e ) ; } } } ; return param ;
public class MpxjQuery { /** * Helper method called recursively to list child tasks . * @ param task task whose children are to be displayed * @ param indent whitespace used to indent hierarchy levels */ private static void listHierarchy ( Task task , String indent ) { } }
for ( Task child : task . getChildTasks ( ) ) { System . out . println ( indent + "Task: " + child . getName ( ) + "\t" + child . getStart ( ) + "\t" + child . getFinish ( ) ) ; listHierarchy ( child , indent + " " ) ; }
public class ResteasyClientFactoryImpl {
    /**
     * Composes an {@code HttpClientBuilder} customiser from the supplied options,
     * chaining each enabled feature onto the given base customiser.
     *
     * N.B. This method signature may change in the future to add new parameters.
     *
     * @param fastFail if true, apply short connect/socket timeouts
     * @param authScope scope to bind the credentials to; {@code AuthScope.ANY} when null
     * @param credentials credentials to register; no auth setup when null
     * @param preemptiveAuth if true (with bearer credentials), send auth preemptively
     * @param storeCookies if true, attach a fresh in-memory cookie store
     * @param customiser base customiser to extend (may be composed repeatedly)
     * @return the composed customiser
     */
    public Consumer < HttpClientBuilder > createHttpClientCustomiser ( final boolean fastFail , final AuthScope authScope , final Credentials credentials , final boolean preemptiveAuth , final boolean storeCookies , Consumer < HttpClientBuilder > customiser ) { } }

// --- implementation body for createHttpClientCustomiser (separate "implementation"
// --- column in this dataset row; code tokens unchanged) ---

// Customise timeouts if fast fail mode is enabled
if (fastFail) {
    customiser = concat(customiser, b -> {
        RequestConfig.Builder requestBuilder = RequestConfig.custom();
        requestBuilder.setConnectTimeout((int) fastFailConnectionTimeout.getMilliseconds())
                      .setSocketTimeout((int) fastFailSocketTimeout.getMilliseconds());
        b.setDefaultRequestConfig(requestBuilder.build());
    });
}
// If credentials were supplied then we should set them up
if (credentials != null) {
    CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
    if (authScope != null)
        credentialsProvider.setCredentials(authScope, credentials);
    else
        credentialsProvider.setCredentials(AuthScope.ANY, credentials);
    // Set up bearer auth scheme provider if we're using bearer credentials
    if (credentials instanceof BearerCredentials) {
        customiser = concat(customiser, b -> {
            Registry<AuthSchemeProvider> authSchemeRegistry =
                    RegistryBuilder.<AuthSchemeProvider>create().register("Bearer", new BearerAuthSchemeProvider()).build();
            b.setDefaultAuthSchemeRegistry(authSchemeRegistry);
        });
    }
    // Set up the credentials customisation
    customiser = concat(customiser, b -> b.setDefaultCredentialsProvider(credentialsProvider));
    // NOTE(review): when credentials are present but preemptiveAuth is false, this
    // else-branch still installs a PreemptiveBasicAuthInterceptor — looks intentional
    // for basic auth but contradicts the flag name; confirm against callers.
    if (preemptiveAuth && credentials instanceof BearerCredentials)
        customiser = concat(customiser, b -> b.addInterceptorFirst(new PreemptiveBearerAuthInterceptor()));
    else
        customiser = concat(customiser, b -> b.addInterceptorLast(new PreemptiveBasicAuthInterceptor()));
}
// If cookies are enabled then set up a cookie store
if (storeCookies)
    customiser = concat(customiser, b -> b.setDefaultCookieStore(new BasicCookieStore()));
return customiser;
public class IfcCartesianPointList2DImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public EList < ListOfIfcLengthMeasure > getCoordList ( ) { } }
return ( EList < ListOfIfcLengthMeasure > ) eGet ( Ifc4Package . Literals . IFC_CARTESIAN_POINT_LIST2_D__COORD_LIST , true ) ;
public class RLS { /** * Learn a new instance with online regression . * @ param x the training instances . * @ param y the target values . */ public void learn ( double [ ] [ ] x , double y [ ] ) { } }
if ( x . length != y . length ) { throw new IllegalArgumentException ( String . format ( "Input vector x of size %d not equal to length %d of y" , x . length , y . length ) ) ; } for ( int i = 0 ; i < x . length ; i ++ ) { learn ( x [ i ] , y [ i ] ) ; }
public class ConciseSet { /** * { @ inheritDoc } */ @ Override public ConciseSet convert ( Collection < Integer > c ) { } }
ConciseSet res = empty ( ) ; Collection < Integer > sorted ; if ( c != null ) { if ( c instanceof SortedSet < ? > && ( ( SortedSet < ? > ) c ) . comparator ( ) == null ) { sorted = c ; } else { sorted = new ArrayList < Integer > ( c ) ; Collections . sort ( ( List < Integer > ) sorted ) ; } for ( int i : sorted ) { if ( res . last != i ) { res . add ( i ) ; } } } return res ;
public class Async { /** * Returns an Observable that starts the specified asynchronous factory function whenever a new observer * subscribes . * < img width = " 640 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / deferFuture . s . png " alt = " " > * @ param < T > the result type * @ param observableFactoryAsync the asynchronous function to start for each observer * @ param scheduler the Scheduler where the completion of the Future is awaited * @ return the Observable emitting items produced by the asynchronous observer produced by the factory * @ see < a href = " https : / / github . com / ReactiveX / RxJava / wiki / Async - Operators # wiki - deferfuture " > RxJava Wiki : deferFuture ( ) < / a > */ public static < T > Observable < T > deferFuture ( Func0 < ? extends Future < ? extends Observable < ? extends T > > > observableFactoryAsync , Scheduler scheduler ) { } }
return OperatorDeferFuture . deferFuture ( observableFactoryAsync , scheduler ) ;
public class ServletConfidentialityConstraintHandler { /** * Use the HttpServerExchange supplied to check if this request is already ' sufficiently ' confidential . * Here we say ' sufficiently ' as sub - classes can override this and maybe even go so far as querying the actual SSLSession . * @ param exchange - The { @ link HttpServerExchange } for the request being processed . * @ return true if the request is ' sufficiently ' confidential , false otherwise . */ protected boolean isConfidential ( final HttpServerExchange exchange ) { } }
ServletRequestContext src = exchange . getAttachment ( ServletRequestContext . ATTACHMENT_KEY ) ; if ( src != null ) { return src . getOriginalRequest ( ) . isSecure ( ) ; } return super . isConfidential ( exchange ) ;
public class JobHistoryService {
    /**
     * Returns the most recent {@code Flow} runs within the given time range, up to
     * {@code limit} instances, optionally restricted to a specific app version.
     *
     * @param cluster the cluster where the jobs were run
     * @param user the user running the jobs
     * @param appId the application identifier for the jobs
     * @param version if non-null, only flows matching this application version are returned
     * @param populateTasks if {@code true}, TaskDetails will be populated for each job
     * @param startTime the start time for the flows to be looked at
     * @param endTime the end time for the flows to be looked at
     * @param limit the maximum number of flows to return
     * @return flows matching the criteria
     */
    public List < Flow > getFlowSeries ( String cluster , String user , String appId , String version , boolean populateTasks , long startTime , long endTime , int limit ) throws IOException { } }

// --- implementation body for getFlowSeries (separate "implementation" column in
// --- this dataset row; code tokens unchanged) ---

// TODO: use RunMatchFilter to limit scan on the server side
byte[] rowPrefix = Bytes.toBytes(cluster + Constants.SEP + user + Constants.SEP + appId + Constants.SEP);
Scan scan = createFlowScan(rowPrefix, limit, version);
// set the start and stop rows for scan so that it's time bound.
// NOTE(review): endTime feeds the scan START row and startTime the STOP row —
// presumably because encodeRunId produces inverted run ids so newest rows sort
// first; confirm against FlowKey.encodeRunId.
if (endTime != 0) {
    byte[] scanStartRow;
    // use end time in start row, if present
    long endRunId = FlowKey.encodeRunId(endTime);
    scanStartRow = Bytes.add(rowPrefix, Bytes.toBytes(endRunId), Constants.SEP_BYTES);
    scan.setStartRow(scanStartRow);
}
if (startTime != 0) {
    byte[] scanStopRow;
    // use start time in stop row, if present
    long stopRunId = FlowKey.encodeRunId(startTime);
    scanStopRow = Bytes.add(rowPrefix, Bytes.toBytes(stopRunId), Constants.SEP_BYTES);
    scan.setStopRow(scanStopRow);
}
return createFromResults(scan, populateTasks, limit);
public class Overrider { /** * Checks if override is present , but it won ' t be actually read * @ param operation name of the operation * @ return true if override is present */ public boolean hasOverride ( String operation ) { } }
boolean result = false ; if ( this . overrideOnce . containsKey ( operation ) == true || this . override . containsKey ( operation ) == true ) { result = true ; } return result ;
public class Builder {
    /**
     * Computes the distance of one GeneralName from another.
     *
     * @param base GeneralName at base of subtree
     * @param test GeneralName to be tested against base
     * @param incomparable the value to return if the names are incomparable
     * @return distance of test name from base, where 0 means exact match,
     *         1 means test is an immediate child of base, 2 a grandchild, etc.;
     *         -1 means test is a parent of base, -2 a grandparent, etc.
     */
    static int distance ( GeneralNameInterface base , GeneralNameInterface test , int incomparable ) { } }

// --- implementation body for distance (separate "implementation" column in this
// --- dataset row; code tokens unchanged) ---

switch (base.constrains(test)) {
case GeneralNameInterface.NAME_DIFF_TYPE:
    if (debug != null) {
        debug.println("Builder.distance(): Names are different types");
    }
    return incomparable;
case GeneralNameInterface.NAME_SAME_TYPE:
    if (debug != null) {
        debug.println("Builder.distance(): Names are same type but " + "in different subtrees");
    }
    return incomparable;
case GeneralNameInterface.NAME_MATCH:
    return 0;
// WIDENS and NARROWS both fall through to the subtree-depth computation below:
// the names are in the same subtree, differing only in depth.
case GeneralNameInterface.NAME_WIDENS:
    break;
case GeneralNameInterface.NAME_NARROWS:
    break;
default: // should never occur
    return incomparable;
}
/* names are in same subtree; positive when test is deeper than base */
return test.subtreeDepth() - base.subtreeDepth();
public class BoxFile { /** * Gets any previous versions of this file . Note that only users with premium accounts will be able to retrieve * previous versions of their files . * @ return a list of previous file versions . */ public Collection < BoxFileVersion > getVersions ( ) { } }
URL url = VERSIONS_URL_TEMPLATE . build ( this . getAPI ( ) . getBaseURL ( ) , this . getID ( ) ) ; BoxAPIRequest request = new BoxAPIRequest ( this . getAPI ( ) , url , "GET" ) ; BoxJSONResponse response = ( BoxJSONResponse ) request . send ( ) ; JsonObject jsonObject = JsonObject . readFrom ( response . getJSON ( ) ) ; JsonArray entries = jsonObject . get ( "entries" ) . asArray ( ) ; Collection < BoxFileVersion > versions = new ArrayList < BoxFileVersion > ( ) ; for ( JsonValue entry : entries ) { versions . add ( new BoxFileVersion ( this . getAPI ( ) , entry . asObject ( ) , this . getID ( ) ) ) ; } return versions ;
public class Viterbi {
    /**
     * Viterbi inference: assigns the most likely label sequence to {@code seq}
     * by dynamic programming over the label lattice, writing the result into
     * each Observation's {@code modelLabel}.
     *
     * @param seq the observation sequence (list of Observation)
     */
    public void viterbiInference ( List seq ) { } }

// --- implementation body for viterbiInference (separate "implementation" column
// --- in this dataset row; code tokens unchanged) ---

int i, j, k;
int seqLen = seq.size();
if (seqLen <= 0) {
    return;
}
// Grow the DP table lazily to fit the sequence.
if (memorySize < seqLen) {
    allocateMemory(seqLen);
}
// compute Vi for the first position in the sequence
computeVi(seq, 0, Vi, true);
for (j = 0; j < numLabels; j++) {
    // memory[t][j].first = best score of label j at position t;
    // .second = backpointer to the best predecessor label.
    memory[0][j].first = Vi.vect[j];
    memory[0][j].second = j;
}
// scaling for the first position (normalize to avoid underflow)
divide(memory[0], sum(memory[0]));
// the main loop
for (i = 1; i < seqLen; i++) {
    // compute Vi at the position i
    computeVi(seq, i, Vi, true);
    // for all possible labels at the position i
    for (j = 0; j < numLabels; j++) {
        memory[i][j].first = 0.0;
        memory[i][j].second = 0;
        // find the maximal value and its index and store them in memory
        // for later tracing back to find the best path
        for (k = 0; k < numLabels; k++) {
            double tempVal = memory[i - 1][k].first * Mi.mtrx[k][j] * Vi.vect[j];
            if (tempVal > memory[i][j].first) {
                memory[i][j].first = tempVal;
                memory[i][j].second = k;
            }
        }
    }
    // scaling for memory at position i
    divide(memory[i], sum(memory[i]));
}
// viterbi backtrack to find the best label path
int maxIdx = findMax(memory[seqLen - 1]);
((Observation) seq.get(seqLen - 1)).modelLabel = maxIdx;
for (i = seqLen - 2; i >= 0; i--) {
    // Follow the backpointer stored at the NEXT position to label position i.
    ((Observation) seq.get(i)).modelLabel = memory[i + 1][maxIdx].second;
    maxIdx = ((Observation) seq.get(i)).modelLabel;
}
public class JDBCStorageConnection {
    /**
     * Returns from storage the next page of nodes and their properties.
     * Rows arrive grouped by node id; consecutive rows for the same node are
     * accumulated into one TempNodeData before being converted.
     *
     * @param lastNodeId node id to resume after
     * @param offset number of leading nodes to skip (when the dialect cannot)
     * @param limit maximum number of nodes to read
     * @return accumulated node data for indexing
     */
    public List < NodeDataIndexing > getNodesAndProperties ( String lastNodeId , int offset , int limit ) throws RepositoryException , IllegalStateException { } }

// --- implementation body for getNodesAndProperties (separate "implementation"
// --- column in this dataset row; code tokens unchanged) ---

List<NodeDataIndexing> result = new ArrayList<NodeDataIndexing>();
checkIfOpened();
try {
    startTxIfNeeded();
    ResultSet resultSet = findNodesAndProperties(lastNodeId, offset, limit);
    // 'processed' counts distinct nodes seen, used to honour the offset manually
    // when needToSkipOffsetNodes() says the SQL dialect could not apply OFFSET.
    int processed = 0;
    try {
        TempNodeData tempNodeData = null;
        while (resultSet.next()) {
            if (tempNodeData == null) {
                // First row of the page starts the first node group.
                tempNodeData = new TempNodeData(resultSet);
                processed++;
            } else if (!resultSet.getString(COLUMN_ID).equals(tempNodeData.cid)) {
                // Node id changed: flush the finished group (unless skipped by offset)
                // and start a new one.
                if (!needToSkipOffsetNodes() || processed > offset) {
                    result.add(createNodeDataIndexing(tempNodeData));
                }
                tempNodeData = new TempNodeData(resultSet);
                processed++;
            }
            if (!needToSkipOffsetNodes() || processed > offset) {
                // Accumulate this row's property under its name, keeping values sorted.
                String key = resultSet.getString("P_NAME");
                SortedSet<TempPropertyData> values = tempNodeData.properties.get(key);
                if (values == null) {
                    values = new TreeSet<TempPropertyData>();
                    tempNodeData.properties.put(key, values);
                }
                values.add(new ExtendedTempPropertyData(resultSet));
            }
        }
        // Flush the trailing node group, if any survived the offset filter.
        if (tempNodeData != null && (!needToSkipOffsetNodes() || processed > offset)) {
            result.add(createNodeDataIndexing(tempNodeData));
        }
    } finally {
        try {
            resultSet.close();
        } catch (SQLException e) {
            LOG.error("Can't close the ResultSet: " + e.getMessage());
        }
    }
} catch (IOException e) {
    throw new RepositoryException(e);
} catch (IllegalNameException e) {
    throw new RepositoryException(e);
} catch (SQLException e) {
    throw new RepositoryException(e);
}
if (LOG.isTraceEnabled()) {
    // NOTE(review): uses %s placeholders — presumably this logger formats
    // printf-style rather than SLF4J's {}; confirm against the LOG type.
    LOG.trace("getNodesAndProperties(%s, %s, %s) = %s elements", lastNodeId, offset, limit, result.size());
}
return result;
public class DSFactory { /** * 创建数据源实现工厂 < br > * 此方法通过 “ 试错 ” 方式查找引入项目的连接池库 , 按照优先级寻找 , 一旦寻找到则创建对应的数据源工厂 < br > * 连接池优先级 : Hikari > Druid > Tomcat > Dbcp > C3p0 > Hutool Pooled * @ return 日志实现类 * @ since 4.1.3 */ private static DSFactory doCreate ( Setting setting ) { } }
try { return new HikariDSFactory ( setting ) ; } catch ( NoClassDefFoundError e ) { // ignore } try { return new DruidDSFactory ( setting ) ; } catch ( NoClassDefFoundError e ) { // ignore } try { return new TomcatDSFactory ( setting ) ; } catch ( NoClassDefFoundError e ) { // 如果未引入包 , 此处会报org . apache . tomcat . jdbc . pool . PoolConfiguration未找到错误 // 因为org . apache . tomcat . jdbc . pool . DataSource实现了此接口 , 会首先检查接口的存在与否 // ignore } try { return new DbcpDSFactory ( setting ) ; } catch ( NoClassDefFoundError e ) { // ignore } try { return new C3p0DSFactory ( setting ) ; } catch ( NoClassDefFoundError e ) { // ignore } return new PooledDSFactory ( setting ) ;
public class DefaultSelendroidDriver {
    /**
     * Takes a screenshot of the current activity by drawing its decor view into
     * a bitmap on the UI thread and compressing it to PNG.
     *
     * @see org.openqa.selenium.android.server.AndroidDriver#takeScreenshot()
     */
    @ SuppressWarnings ( "deprecation" )
    @ Override
    public byte [ ] takeScreenshot ( ) { } }

// --- implementation body for takeScreenshot (separate "implementation" column in
// --- this dataset row; code tokens unchanged) ---

ViewHierarchyAnalyzer viewAnalyzer = ViewHierarchyAnalyzer.getDefaultInstance();
// TODO ddary review later, but with getRecentDecorView() it seems to work better.
// (A previous approach, kept here disabled, iterated viewAnalyzer.getTopLevelViews()
//  picking the shown, window-focused view with the latest drawing time.)
final View mainView = viewAnalyzer.getRecentDecorView();
if (mainView == null) {
    throw new SelendroidException("No open windows.");
}
done = false;
long end = System.currentTimeMillis() + serverInstrumentation.getAndroidWait().getTimeoutInMillis();
// Single-element array lets the anonymous Runnable write the result back.
final byte[][] rawPng = new byte[1][1];
serverInstrumentation.getCurrentActivity().runOnUiThread(new Runnable() {
    public void run() {
        synchronized (syncObject) {
            Display display = serverInstrumentation.getCurrentActivity().getWindowManager().getDefaultDisplay();
            Point size = new Point();
            try {
                display.getSize(size);
            } catch (NoSuchMethodError ignore) { // Older than api level 13
                size.x = display.getWidth();
                size.y = display.getHeight();
            }
            // Get root view
            View view = mainView.getRootView();
            // Create the bitmap to use to draw the screenshot
            final Bitmap bitmap = Bitmap.createBitmap(size.x, size.y, Bitmap.Config.ARGB_8888);
            final Canvas canvas = new Canvas(bitmap);
            // Get current theme to know which background to use
            final Activity activity = serverInstrumentation.getCurrentActivity();
            final Theme theme = activity.getTheme();
            final TypedArray ta = theme.obtainStyledAttributes(new int[] { android.R.attr.windowBackground });
            final int res = ta.getResourceId(0, 0);
            final Drawable background = activity.getResources().getDrawable(res);
            // Draw background
            background.draw(canvas);
            // Draw views
            view.draw(canvas);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            // 70 = PNG compression hint (PNG is lossless; quality is ignored by spec).
            if (!bitmap.compress(Bitmap.CompressFormat.PNG, 70, stream)) {
                throw new RuntimeException("Error while compressing screenshot image.");
            }
            try {
                stream.flush();
                stream.close();
            } catch (IOException e) {
                throw new RuntimeException("I/O Error while capturing screenshot: " + e.getMessage());
            } finally {
                // NOTE(review): this re-closes the already-closed stream; harmless for
                // ByteArrayOutputStream (close() is a no-op) but redundant.
                Closeable closeable = (Closeable) stream;
                try {
                    if (closeable != null) {
                        closeable.close();
                    }
                } catch (IOException ioe) {
                    // ignore
                }
            }
            // toByteArray() is safe after close() on a ByteArrayOutputStream.
            rawPng[0] = stream.toByteArray();
            mainView.destroyDrawingCache();
            done = true;
            syncObject.notify();
        }
    }
});
// Block the caller until the UI thread signals completion (or times out).
waitForDone(end, serverInstrumentation.getAndroidWait().getTimeoutInMillis(), "Failed to take screenshot.");
return rawPng[0];
public class SecondsBasedEntryTaskScheduler { /** * Cancels the scheduled future and removes the entries map for the given second If no entries are left * Cleans up parent container ( second - > entries map ) if it doesn ' t hold anymore items this second . * Cancels associated scheduler ( second - > scheduler map ) if there are no more items to remove for this second . * @ param second second at which this entry was scheduled to be evicted * @ param entries entries which were already scheduled to be evicted for this second */ private void cleanUpScheduledFuturesIfEmpty ( Integer second , Map < Object , ScheduledEntry < K , V > > entries ) { } }
if ( entries . isEmpty ( ) ) { scheduledEntries . remove ( second ) ; ScheduledFuture removedFeature = scheduledTaskMap . remove ( second ) ; if ( removedFeature != null ) { removedFeature . cancel ( false ) ; } }
public class WavefrontStrings { /** * Create a map of tags for wavefront . * The tag values are escaped and should be surrounded by double quotes . * This function does not put the surrounding quotes around the tag values . */ public static Map < String , String > tags ( Tags tags ) { } }
return tags . stream ( ) . map ( WavefrontStrings :: createTagEntry ) . flatMap ( opt -> opt . map ( Stream :: of ) . orElseGet ( Stream :: empty ) ) . map ( WavefrontStrings :: maybeTruncateTagEntry ) . collect ( Collectors . toMap ( Map . Entry :: getKey , Map . Entry :: getValue ) ) ;
public class CommonOps_DDRM { /** * Element - wise exp operation < br > * c < sub > ij < / sub > = Math . log ( a < sub > ij < / sub > ) * @ param A input * @ param C output ( modified ) */ public static void elementExp ( DMatrixD1 A , DMatrixD1 C ) { } }
if ( A . numCols != C . numCols || A . numRows != C . numRows ) { throw new MatrixDimensionException ( "All matrices must be the same shape" ) ; } int size = A . getNumElements ( ) ; for ( int i = 0 ; i < size ; i ++ ) { C . data [ i ] = Math . exp ( A . data [ i ] ) ; }
public class JasperReportBuilder { /** * Set the directory and test that the directory exists and is contained within the Configuration * directory . * @ param directory the new directory */ public void setDirectory ( final String directory ) { } }
this . directory = new File ( this . configuration . getDirectory ( ) , directory ) ; if ( ! this . directory . exists ( ) ) { throw new IllegalArgumentException ( String . format ( "Directory does not exist: %s.\n" + "Configuration contained value %s which is supposed to be relative to " + "configuration directory." , this . directory , directory ) ) ; } if ( ! this . directory . getAbsolutePath ( ) . startsWith ( this . configuration . getDirectory ( ) . getAbsolutePath ( ) ) ) { throw new IllegalArgumentException ( String . format ( "All files and directories must be contained in the configuration directory the " + "directory provided in the configuration breaks that contract: %s in config " + "file resolved to %s." , directory , this . directory ) ) ; }
public class ProcessCommonJSModules { /** * Recognize if a node is a dynamic module import . Currently only the webpack dynamic import is * recognized : * < ul > * < li > _ _ webpack _ require _ _ . e ( 0 ) . then ( function ( ) { return _ _ webpack _ require _ _ ( 4 ) ; } ) * < li > Promise . all ( [ _ _ webpack _ require _ _ . e ( 0 ) ] ) . then ( function ( ) { return * _ _ webpack _ require _ _ ( 4 ) ; } ) * < / ul > */ public static boolean isCommonJsDynamicImportCallback ( Node n , ModuleLoader . ResolutionMode resolutionMode ) { } }
if ( n == null || resolutionMode != ModuleLoader . ResolutionMode . WEBPACK ) { return false ; } if ( n . isFunction ( ) && isWebpackRequireEnsureCallback ( n ) ) { return true ; } return false ;
public class LocalDateTime { /** * Queries this date - time using the specified query . * This queries this date - time using the specified query strategy object . * The { @ code TemporalQuery } object defines the logic to be used to * obtain the result . Read the documentation of the query to understand * what the result of this method will be . * The result of this method is obtained by invoking the * { @ link TemporalQuery # queryFrom ( TemporalAccessor ) } method on the * specified query passing { @ code this } as the argument . * @ param < R > the type of the result * @ param query the query to invoke , not null * @ return the query result , null may be returned ( defined by the query ) * @ throws DateTimeException if unable to query ( defined by the query ) * @ throws ArithmeticException if numeric overflow occurs ( defined by the query ) */ @ SuppressWarnings ( "unchecked" ) @ Override // override for Javadoc public < R > R query ( TemporalQuery < R > query ) { } }
if ( query == TemporalQueries . localDate ( ) ) { return ( R ) date ; } return ChronoLocalDateTime . super . query ( query ) ;
public class Transition { /** * Adds a listener to the set of listeners that are sent events through the * life of an animation , such as start , repeat , and end . * @ param listener the listener to be added to the current set of listeners * for this animation . * @ return This transition object . */ @ NonNull public Transition addListener ( @ NonNull TransitionListener listener ) { } }
if ( mListeners == null ) { mListeners = new ArrayList < TransitionListener > ( ) ; } mListeners . add ( listener ) ; return this ;
public class TangoCacheManager { /** * Get cache of a command * @ param cmd * The command * @ return The command cache */ public synchronized SelfPopulatingCache getCommandCache ( final CommandImpl cmd ) { } }
SelfPopulatingCache cache = null ; if ( cmd . getName ( ) . equalsIgnoreCase ( DeviceImpl . STATE_NAME ) ) { cache = stateCache . getCache ( ) ; } else if ( cmd . getName ( ) . equalsIgnoreCase ( DeviceImpl . STATUS_NAME ) ) { cache = statusCache . getCache ( ) ; } else { CommandCache cmdCache = commandCacheMap . get ( cmd ) ; if ( cmdCache == null ) { cmdCache = extTrigCommandCacheMap . get ( cmd ) ; } cache = cmdCache . getCache ( ) ; } return cache ;
public class HopkinsStatisticClusteringTendency { /** * Search nearest neighbors for < em > real < / em > data members . * @ param knnQuery KNN query * @ param relation Data relation * @ return Aggregated 1NN distances */ protected double computeNNForRealData ( final KNNQuery < NumberVector > knnQuery , Relation < NumberVector > relation , final int dim ) { } }
double w = 0. ; ModifiableDBIDs dataSampleIds = DBIDUtil . randomSample ( relation . getDBIDs ( ) , sampleSize , random ) ; for ( DBIDIter iter = dataSampleIds . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { final double kdist = knnQuery . getKNNForDBID ( iter , k + 1 ) . getKNNDistance ( ) ; w += MathUtil . powi ( kdist , dim ) ; } return w ;
public class MultiLanguageTextProcessor { /** * Format the given multiLanguageText by removing duplicated white spaces , underscores and camel cases in all entries . * @ param multiLanguageText the multiLanguageText to format . * @ return the formatted multiLanguageText . */ public static MultiLanguageText . Builder format ( final MultiLanguageText . Builder multiLanguageText ) { } }
for ( final MapFieldEntry . Builder entryBuilder : multiLanguageText . getEntryBuilderList ( ) ) { final String value = entryBuilder . getValue ( ) ; entryBuilder . clearValue ( ) ; entryBuilder . setValue ( format ( value ) ) ; } return multiLanguageText ;
public class LongRangeRandomizer { /** * Create a new { @ link LongRangeRandomizer } . * @ param min min value * @ param max max value * @ param seed initial seed * @ return a new { @ link LongRangeRandomizer } . */ public static LongRangeRandomizer aNewLongRangeRandomizer ( final Long min , final Long max , final long seed ) { } }
return new LongRangeRandomizer ( min , max , seed ) ;
public class WikipediaTemplateInfoGenerator { /** * Start generator */ public void process ( ) throws Exception { } }
WikipediaTemplateInfo info = new WikipediaTemplateInfo ( getWiki ( ) ) ; pageTableExists = info . tableExists ( GeneratorConstants . TABLE_TPLID_PAGEID ) ; revisionTableExists = info . tableExists ( GeneratorConstants . TABLE_TPLID_REVISIONID ) ; if ( ! pageTableExists && ! revisionTableExists && mode . active_for_pages && mode . active_for_revisions ) { // TODO see fix - me comment in WikipediaTemplateInfoDumpWriter throw new IllegalStateException ( "Currently, you cannot create revision-tpl index and page-tpl index at the same time. The code is there, but it currently assigns separate tpl-name-ids for page-tpls and revisions-tpls. Please create a revision-tpl index, import the data into the db, create the page-tpl index and import this data." ) ; } if ( mode . useRevisionIterator ) { if ( mode . active_for_revisions ) { processRevisions ( ) ; } if ( mode . active_for_pages ) { processPages ( ) ; } } else { try { extractTemplates ( ) ; } catch ( WikiApiException e ) { logger . error ( "Error extracting templates." , e ) ; } } logger . info ( "Generating template indices ..." ) ; boolean tableWithTemplatesExists = false ; tableWithTemplatesExists = true ; if ( mode . active_for_pages && pageTableExists ) { generateTemplateIndices ( info , TPLNAME_TO_PAGEIDS . keySet ( ) ) ; } if ( mode . active_for_revisions && revisionTableExists ) { generateTemplateIndices ( info , TPLNAME_TO_REVISIONIDS . keySet ( ) ) ; } logger . info ( "Writing SQL dump ..." ) ; WikipediaTemplateInfoDumpWriter writer = new WikipediaTemplateInfoDumpWriter ( this . outputPath , this . charset , this . tplNameToTplId , tableWithTemplatesExists ) ; mode . templateNameToPageId = TPLNAME_TO_PAGEIDS ; mode . templateNameToRevId = TPLNAME_TO_REVISIONIDS ; writer . writeSQL ( revisionTableExists , pageTableExists , mode ) ;
public class AnalysisContext {

    /**
     * Report an error.
     *
     * Routing, in order: without a current analysis context the error is printed to
     * stderr (except {@code NoSuchBugPattern}, which is ignored); missing-class
     * exceptions of either flavor are funneled to {@code reportMissingClass};
     * everything else goes to the context's lookup-failure callback, if any.
     *
     * @param msg human-readable description of the error
     * @param e   the exception that triggered the report
     */
    static public void logError ( String msg , Exception e ) {
        AnalysisContext currentAnalysisContext2 = currentAnalysisContext ( ) ;
        if ( currentAnalysisContext2 == null ) {
            // No context: NoSuchBugPattern is deliberately swallowed ...
            if ( e instanceof NoSuchBugPattern ) {
                return ;
            }
            /* Disabled assertion path kept for reference:
               if ( false && SystemProperties.ASSERTIONS_ENABLED ) {
                   AssertionError e2 = new AssertionError ( "Exception logged with no analysis context" ) ;
                   e2.initCause ( e ) ;
                   throw e2;
               } */
            // ... everything else is dumped to stderr as a last resort.
            e . printStackTrace ( System . err ) ;
            return ;
        }
        // Missing-class conditions are reported through the dedicated channel,
        // converting each wrapper back to its ClassNotFoundException.
        if ( e instanceof MissingClassException ) {
            reportMissingClass ( ( ( MissingClassException ) e ) . getClassNotFoundException ( ) ) ;
            return ;
        }
        if ( e instanceof edu . umd . cs . findbugs . classfile . MissingClassException ) {
            reportMissingClass ( ( ( edu . umd . cs . findbugs . classfile . MissingClassException ) e ) . toClassNotFoundException ( ) ) ;
            return ;
        }
        // General errors are delegated to the registered callback; silently dropped
        // when no callback is installed.
        RepositoryLookupFailureCallback lookupFailureCallback = currentAnalysisContext2 . getLookupFailureCallback ( ) ;
        if ( lookupFailureCallback != null ) {
            lookupFailureCallback . logError ( msg , e ) ;
        }
    }
}
public class CompilerInput { /** * Gets a list of types provided , but does not attempt to * regenerate the dependency information . Typically this occurs * from module rewriting . */ ImmutableCollection < String > getKnownProvides ( ) { } }
return concat ( dependencyInfo != null ? dependencyInfo . getProvides ( ) : ImmutableList . < String > of ( ) , extraProvides ) ;
public class WebcamUtils { /** * Capture image as BYteBuffer . * @ param webcam the webcam from which image should be obtained * @ param format the file format * @ return Byte buffer */ public static final ByteBuffer getImageByteBuffer ( Webcam webcam , String format ) { } }
return ByteBuffer . wrap ( getImageBytes ( webcam , format ) ) ;
public class UrlUtils { /** * Retrieve the var value for varName from a HTTP query string ( format is * " var1 = val1 & amp ; var2 = val2 " ) . * @ param varName the name . * @ param haystack the haystack . * @ return variable value for varName */ public static String getVarFromQueryString ( String varName , String haystack ) { } }
if ( haystack == null || haystack . length ( ) == 0 ) { return null ; } String modifiedHaystack = haystack ; if ( modifiedHaystack . charAt ( 0 ) == '?' ) { modifiedHaystack = modifiedHaystack . substring ( 1 ) ; } String [ ] vars = modifiedHaystack . split ( "&" ) ; for ( String var : vars ) { String [ ] tuple = var . split ( "=" ) ; if ( tuple . length == 2 && tuple [ 0 ] . equals ( varName ) ) { return tuple [ 1 ] ; } } return null ;
public class ManagementResource { /** * Get the file name extension from its name . * @ param filename the filename . * @ return the file name extension . */ private static String getFileExtension ( final String filename ) { } }
String extension = "" ; int i = filename . lastIndexOf ( '.' ) ; int p = Math . max ( filename . lastIndexOf ( '/' ) , filename . lastIndexOf ( '\\' ) ) ; if ( i > p ) extension = filename . substring ( i + 1 ) ; return extension ;
public class Matrices { /** * MatrixSort is able to sort matrix rows when matrix is stored in one dimensional * array as in DenseMatrix64F . * < p > Sort is using Quick Sort algorithm . * @ param matrix * @ param comparator */ public static void sort ( DenseMatrix64F matrix , RowComparator comparator ) { } }
int len = matrix . numCols ; quickSort ( matrix . data , 0 , matrix . numRows - 1 , len , comparator , new double [ len ] , new double [ len ] ) ;
public class AsciidocRuleParserPlugin { /** * Evaluates required concepts of a rule . * @ param ruleSource * The rule source . * @ param attributes * The attributes of an asciidoc rule block * @ param id * The id . * @ return A map where the keys represent the ids of required concepts and the * values if they are optional . * @ throws RuleException * If the dependencies cannot be evaluated . */ private Map < String , Boolean > getRequiresConcepts ( RuleSource ruleSource , String id , Attributes attributes ) throws RuleException { } }
Map < String , String > requiresDeclarations = getReferences ( attributes , REQUIRES_CONCEPTS ) ; Map < String , Boolean > required = new HashMap < > ( ) ; for ( Map . Entry < String , String > requiresEntry : requiresDeclarations . entrySet ( ) ) { String conceptId = requiresEntry . getKey ( ) ; String dependencyAttribute = requiresEntry . getValue ( ) ; Boolean optional = dependencyAttribute != null ? OPTIONAL . equals ( dependencyAttribute . toLowerCase ( ) ) : null ; required . put ( conceptId , optional ) ; } return required ;
public class IOUtil { /** * Note : copied from Google Guava under Apache License v2. * Maps a file in to memory as per * { @ link FileChannel # map ( java . nio . channels . FileChannel . MapMode , long , long ) } * using the requested { @ link MapMode } . * < p > Files are mapped from offset 0 to { @ code size } . * < p > If the mode is { @ link MapMode # READ _ WRITE } and the file does not exist , * it will be created with the requested { @ code size } . Thus this method is * useful for creating memory mapped files which do not yet exist . * < p > This only works for files < = { @ link Integer # MAX _ VALUE } bytes . * @ param file the file to map * @ param mode the mode to use when mapping { @ code file } * @ param offset * @ param len * @ return a buffer reflecting { @ code file } * @ see FileChannel # map ( MapMode , long , long ) * @ since 2.0 */ public static MappedByteBuffer map ( File file , MapMode mode , long offset , long len ) throws UncheckedIOException { } }
N . checkArgNotNull ( file ) ; N . checkArgNotNull ( mode ) ; RandomAccessFile raf = null ; try { raf = new RandomAccessFile ( file , mode == MapMode . READ_ONLY ? "r" : "rw" ) ; return raf . getChannel ( ) . map ( mode , offset , len ) ; } catch ( IOException e ) { throw new UncheckedIOException ( e ) ; } finally { IOUtil . closeQuietly ( raf ) ; }
public class HttpFields { /** * Get multiple header of the same name * @ param header the header * @ return List the values */ public List < String > getValuesList ( HttpHeader header ) { } }
final List < String > list = new ArrayList < > ( ) ; for ( HttpField f : this ) if ( f . getHeader ( ) == header ) list . add ( f . getValue ( ) ) ; return list ;
public class LogRecord { /** * 次のメッセージを持つ操作ログオブジェクトを作成します 。 * @ param logger * ロガー * @ param position * 要素位置 * @ param testStep * テストステップ * @ param pattern * メッセージパターン * @ param params * メッセージパラメーター * @ return 操作ログ */ public static LogRecord create ( SitLogger logger , ElementPosition position , TestStep testStep , MessagePattern pattern , Object ... params ) { } }
Object [ ] newParams = new Object [ ] { testStep . getItemName ( ) , testStep . getLocator ( ) } ; newParams = ArrayUtils . addAll ( newParams , params ) ; return create ( logger , position , testStep , pattern . getPattern ( ) , newParams ) ;
public class ExampleFourierTransform {

    /**
     * Demonstrates how to apply a box filter in the frequency domain and compares
     * the result to a box filter applied in the spatial domain.
     *
     * @param input image to blur; its pixel values are restored before returning
     */
    public static void applyBoxFilter ( GrayF32 input ) {
        // declare storage
        GrayF32 boxImage = new GrayF32 ( input . width , input . height ) ;
        InterleavedF32 boxTransform = new InterleavedF32 ( input . width , input . height , 2 ) ;
        InterleavedF32 transform = new InterleavedF32 ( input . width , input . height , 2 ) ;
        GrayF32 blurredImage = new GrayF32 ( input . width , input . height ) ;
        GrayF32 spatialBlur = new GrayF32 ( input . width , input . height ) ;
        DiscreteFourierTransform < GrayF32 , InterleavedF32 > dft = DiscreteFourierTransformOps . createTransformF32 ( ) ;
        // Make the image scaled from 0 to 1 to reduce overflow issues
        PixelMath . divide ( input , 255.0f , input ) ;
        // compute the Fourier Transform of the input
        dft . forward ( input , transform ) ;
        // create the 15x15 box filter centered around the pixel. Note that the filter
        // gets wrapped around the image edges (negative offsets index from the far side)
        for ( int y = 0 ; y < 15 ; y ++ ) {
            int yy = y - 7 < 0 ? boxImage . height + ( y - 7 ) : y - 7 ;
            for ( int x = 0 ; x < 15 ; x ++ ) {
                int xx = x - 7 < 0 ? boxImage . width + ( x - 7 ) : x - 7 ;
                // Set the value such that it doesn't change the image intensity
                boxImage . set ( xx , yy , 1.0f / ( 15 * 15 ) ) ;
            }
        }
        // compute the DFT for the box filter
        dft . forward ( boxImage , boxTransform ) ;
        // Visualize the Fourier Transform for the input image and the box filter
        displayTransform ( transform , "Input Image" ) ;
        displayTransform ( boxTransform , "Box Filter" ) ;
        // apply the filter: convolution in the spatial domain is the same as
        // multiplication in the frequency domain
        DiscreteFourierTransformOps . multiplyComplex ( transform , boxTransform , transform ) ;
        // convert the image back and display the results
        dft . inverse ( transform , blurredImage ) ;
        // undo change of scale on both the blurred output and the original input
        PixelMath . multiply ( blurredImage , 255.0f , blurredImage ) ;
        PixelMath . multiply ( input , 255.0f , input ) ;
        // For sake of comparison, compute the box blur filter in the spatial domain.
        // NOTE: The image border will be different since the frequency domain wraps
        // around while this spatial implementation adapts the kernel size at the edges
        BlurImageOps . mean ( input , spatialBlur , 7 , null , null ) ;
        // Convert to BufferedImage for output
        BufferedImage originOut = ConvertBufferedImage . convertTo ( input , null ) ;
        BufferedImage spacialOut = ConvertBufferedImage . convertTo ( spatialBlur , null ) ;
        BufferedImage blurredOut = ConvertBufferedImage . convertTo ( blurredImage , null ) ;
        ListDisplayPanel listPanel = new ListDisplayPanel ( ) ;
        listPanel . addImage ( originOut , "Original Image" ) ;
        listPanel . addImage ( spacialOut , "Spacial Domain Box" ) ;
        listPanel . addImage ( blurredOut , "Frequency Domain Box" ) ;
        ShowImages . showWindow ( listPanel , "Box Blur in Spacial and Frequency Domain of Input Image" ) ;
    }
}
public class RegexRequestMatcher { /** * Performs the match of the request URL ( { @ code servletPath + pathInfo + queryString } ) against * the compiled * pattern . * @ param request the request to match * @ return true if the pattern matches the URL , false otherwise . */ public boolean matches ( HttpServletRequest request ) { } }
if ( httpMethod != null && httpMethod != HttpMethod . valueOf ( request . getMethod ( ) ) ) { return false ; } String url = request . getServletPath ( ) ; String pathInfo = request . getPathInfo ( ) ; String query = request . getQueryString ( ) ; if ( pathInfo != null || query != null ) { StringBuilder sb = new StringBuilder ( url ) ; if ( pathInfo != null ) sb . append ( pathInfo ) ; if ( query != null ) sb . append ( query ) ; url = sb . toString ( ) ; } logger . debug ( "Checking match of request : '{}'; against '{}'" , url , pattern ) ; return pattern . matcher ( url ) . matches ( ) ;
public class ConfigEntry { /** * This method returns an array of DependencyId objects that specified addditional cache * indentifers that associated multiple cache entries to the same group identiifier . * @ return Array of DependencyId objects */ public DependencyId [ ] getDependencyIds ( ) { } }
DependencyId [ ] depIds = new DependencyId [ configEntry . dependencyIds . length ] ; for ( int i = 0 ; i < configEntry . dependencyIds . length ; i ++ ) { depIds [ i ] = new DependencyId ( configEntry . dependencyIds [ i ] ) ; } return depIds ;
public class CUstreamWriteValue_flags { /** * Returns the String identifying the given CUstreamWriteValue _ flags * @ param n The CUstreamWriteValue _ flags * @ return The String identifying the given CUstreamWriteValue _ flags */ public static String stringFor ( int n ) { } }
if ( n == 0 ) { return "CU_STREAM_WRITE_VALUE_DEFAULT" ; } String result = "" ; if ( ( n & CU_STREAM_WRITE_VALUE_NO_MEMORY_BARRIER ) != 0 ) result += "CU_STREAM_WRITE_VALUE_NO_MEMORY_BARRIER " ; return result ;
public class ResourceLimit { /** * Check if the node has enough memory to run tasks * @ param node node to check * @ return true if the node has enough memory , false otherwise */ public boolean hasEnoughMemory ( ClusterNode node ) { } }
int total = node . getTotal ( ) . memoryMB ; int free = node . getFree ( ) . memoryMB ; if ( free < nodeReservedMemoryMB ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( node . getHost ( ) + " not enough memory." + " totalMB:" + total + " free:" + free + " limit:" + nodeReservedMemoryMB ) ; } return false ; } return true ;
public class DataSiftPylon { /** * Compile a CSDL string to a stream hash to which you can later subscribe and receive interactions from . * For information on this endpoint see documentation page : * http : / / dev . datasift . com / pylon / docs / api / pylon - api - endpoints / pyloncompile * @ param csdl the CSDL to compile * @ return a stream object representing the DataSift compiled CSDL , use { @ link com . datasift . client . core * . Stream # hash ( ) } * to list the hash for the compiled CSDL */ public FutureData < PylonStream > compile ( String csdl ) { } }
FutureData < PylonStream > future = new FutureData < PylonStream > ( ) ; URI uri = newParams ( ) . forURL ( config . newAPIEndpointURI ( COMPILE ) ) ; JSONRequest request = config . http ( ) . postJSON ( uri , new PageReader ( newRequestCallback ( future , new PylonStream ( ) , config ) ) ) . addField ( "csdl" , csdl ) ; performRequest ( future , request ) ; return future ;
public class ComputeExecutor { /** * Helper method to convert type labels to IDs * @ param labelSet * @ return a set of LabelIds */ private Set < LabelId > convertLabelsToIds ( Set < Label > labelSet ) { } }
return labelSet . stream ( ) . map ( tx :: convertToId ) . filter ( LabelId :: isValid ) . collect ( toSet ( ) ) ;
public class StandardDdlParser {

    /**
     * Parses a DDL CREATE CHARACTER SET statement into an {@link AstNode}, based on
     * SQL 92 specifications.
     *
     * @param tokens     the {@link DdlTokenStream} representing the tokenized DDL
     *                   content; may not be null
     * @param parentNode the parent {@link AstNode}; may not be null
     * @return the parsed statement node
     * @throws ParsingException if the token stream cannot be parsed
     */
    protected AstNode parseCreateCharacterSetStatement ( DdlTokenStream tokens , AstNode parentNode ) throws ParsingException {
        assert tokens != null ;
        assert parentNode != null ;
        // Statement boundaries are tracked so the node can record its source span.
        markStartOfStatement ( tokens ) ;
        tokens . consume ( STMT_CREATE_CHARACTER_SET ) ;
        String name = parseName ( tokens ) ;
        AstNode node = nodeFactory ( ) . node ( name , parentNode , TYPE_CREATE_CHARACTER_SET_STATEMENT ) ;
        // TODO author = Horia Chiorean date = 1/4/12 description = Some of the optional attributes from the CND are not implemented yet
        // The identifier following the name is recorded as the existing character set.
        node . setProperty ( EXISTING_NAME , consumeIdentifier ( tokens ) ) ;
        // Remaining (unimplemented) clauses are skipped up to the terminator.
        parseUntilTerminator ( tokens ) ;
        markEndOfStatement ( tokens , node ) ;
        return node ;
    }
}
public class MapUtil {

    /**
     * Compares two maps and returns a {@link MapDifference} describing:
     * keys only in the left map, keys only in the right map, keys present in both
     * with equal values, and keys present in both with differing values.
     *
     * @param left  the first map to compare
     * @param right the second map to compare
     * @return the computed difference
     * @see com.google.common.collect.MapDifference
     */
    public static < K , V > MapDifference < K , V > difference ( Map < ? extends K , ? extends V > left , Map < ? extends K , ? extends V > right ) {
        return Maps . difference ( left , right ) ;
    }
}
public class FromCobolVisitor { /** * Retrieve the size of an array . * For variable size arrays this requires an ODOObject whose value * determines the size at runtime . * @ param type the array type * @ return the actual size of the array */ private int getOccurs ( CobolArrayType type ) { } }
if ( type . isVariableSize ( ) ) { return getOdoValue ( type . getDependingOn ( ) ) ; } else { return type . getMaxOccurs ( ) ; }
public class LaCountdownRaceLatch {

    /**
     * Joins the count-down race: counts down the shared latch and, unless this
     * caller is the last runner, waits until all runners have arrived.
     *
     * The latch inspection, reset, and count-down happen under this object's
     * monitor so the "last runner" decision and the count-down are atomic; the
     * potentially blocking await happens outside the lock.
     */
    public void await ( ) {
        final CountDownLatch latch ;
        final boolean last ;
        synchronized ( this ) {
            latch = prepareLatch ( ) ;
            // The runner that observes a remaining count of 1 is the last one in.
            last = ( actuallyGetCount ( latch ) == 1 ) ;
            if ( last ) {
                if ( _log . isDebugEnabled ( ) ) {
                    _log . debug ( "...Restarting count down race" ) ;
                }
                // Clear the stored latch so the next race starts with a fresh one.
                clearLatch ( ) ;
            }
            actuallyCountDown ( latch ) ; // ready go if last
        }
        // Non-last runners block here (outside the monitor, so the last runner can
        // still enter the synchronized block above and release them).
        if ( ! last ) {
            if ( isWaitingLatch ( ) ) {
                if ( _log . isDebugEnabled ( ) ) {
                    _log . debug ( "...Awaiting all runners coming here" ) ;
                }
                actuallyAwait ( latch ) ;
            }
        }
    }
}
public class MatrixFunctions {

    /**
     * Applies the base-10 logarithm element-wise on this matrix. Note that this
     * is an in-place operation.
     *
     * @param x the matrix to transform in place
     * @return the same matrix, for chaining
     * @see MatrixFunctions#log10(DoubleMatrix)
     */
    public static DoubleMatrix log10i ( DoubleMatrix x ) {
        final int count = x . length ;
        for ( int idx = 0 ; idx < count ; idx ++ ) {
            x . put ( idx , Math . log10 ( x . get ( idx ) ) ) ;
        }
        return x ;
    }
}
public class EtcdClient { /** * Get the Members of Etcd * @ return vEtcdMembersResponse */ public EtcdMembersResponse getMembers ( ) { } }
try { return new EtcdMembersRequest ( this . client , retryHandler ) . send ( ) . get ( ) ; } catch ( IOException | EtcdException | EtcdAuthenticationException | TimeoutException e ) { return null ; }
public class Futures {

    /**
     * Returns a new {@code ListenableFuture} whose result is asynchronously derived
     * from the result of the given {@code Future}. More precisely, the returned
     * {@code Future} takes its result from a {@code Future} produced by applying
     * the given {@code AsyncFunction} to the result of the original {@code Future}.
     *
     * <p>Example:
     * <pre>{@code
     * ListenableFuture<RowKey> rowKeyFuture = indexService.lookUp(query);
     * AsyncFunction<RowKey, QueryResult> queryFunction =
     *     new AsyncFunction<RowKey, QueryResult>() {
     *       public ListenableFuture<QueryResult> apply(RowKey rowKey) {
     *         return dataService.read(rowKey);
     *       }
     *     };
     * ListenableFuture<QueryResult> queryFuture =
     *     transform(rowKeyFuture, queryFunction, executor);
     * }</pre>
     *
     * <p>The returned {@code Future} attempts to keep its cancellation state in
     * sync with that of the input future and of the future returned by the chain
     * function: cancelling the returned future attempts to cancel the other two,
     * and cancelling either of those triggers a callback in which the returned
     * future attempts to cancel itself.
     *
     * <p>When {@code function.apply} is fast and lightweight, consider omitting the
     * executor or explicitly specifying {@code directExecutor}, but be aware of the
     * caveats documented for direct execution.
     *
     * @param input    the future to transform
     * @param function a function to transform the result of the input future to
     *                 the result of the output future
     * @param executor the executor to run the function in
     * @return a future holding the result of the function (if the input succeeded)
     *         or the original input's failure (if not)
     * @since 11.0
     */
    public static < I , O > ListenableFuture < O > transform ( ListenableFuture < I > input , AsyncFunction < ? super I , ? extends O > function , Executor executor ) {
        checkNotNull ( executor ) ;
        // The chaining future wires cancellation between input, function output,
        // and itself.
        ChainingListenableFuture < I , O > output = new ChainingListenableFuture < I , O > ( function , input ) ;
        // The listener runs on directExecutor; the user-supplied executor is applied
        // inside rejectionPropagatingRunnable so rejections surface on `output`.
        input . addListener ( rejectionPropagatingRunnable ( output , output , executor ) , directExecutor ( ) ) ;
        return output ;
    }
}
public class SocialAuthenticationFilter { /** * Indicates whether this filter should attempt to process a social network login request for the current invocation . * < p > Check if request URL matches filterProcessesUrl with valid providerId . * The URL must be like { filterProcessesUrl } / { providerId } . * @ return < code > true < / code > if the filter should attempt authentication , < code > false < / code > otherwise . */ @ Deprecated protected boolean requiresAuthentication ( HttpServletRequest request , HttpServletResponse response ) { } }
String providerId = getRequestedProviderId ( request ) ; if ( providerId != null ) { Set < String > authProviders = authServiceLocator . registeredAuthenticationProviderIds ( ) ; return authProviders . contains ( providerId ) ; } return false ;
public class YieldWait { /** * / * ( non - Javadoc ) * @ see cyclops2 . async . wait . WaitStrategy # take ( cyclops2 . async . wait . WaitStrategy . Takeable ) */ @ Override public T take ( final WaitStrategy . Takeable < T > t ) throws InterruptedException { } }
T result ; while ( ( result = t . take ( ) ) == null ) { Thread . yield ( ) ; } return result ;
public class TrustGraphNode {

    /**
     * Forwards a received message to the next hop on the route according to the
     * routing table, with TTL decreased by 1. The message is dropped when the
     * forwarding policy rejects it or no next hop exists.
     *
     * @param message the advertisement to forward
     * @return true if and only if the message was forwarded
     */
    protected boolean forwardAdvertisement ( TrustGraphAdvertisement message ) {
        // Forwarding policy gets first say.
        if ( ! shouldForward ( message ) ) {
            return false ;
        }
        // Look up the next hop; without one the message cannot be routed.
        final TrustGraphNodeId destination = getRoutingTable ( ) . getNextHop ( message ) ;
        if ( destination == null ) {
            return false ;
        }
        // Relay with TTL decreased by 1.
        sendAdvertisement ( message , destination , message . getInboundTTL ( ) - 1 ) ;
        return true ;
    }
}
public class LongArrayList { /** * Inserts all of the elements in the specified Collection into this * list , starting at the specified position . Shifts the element * currently at that position ( if any ) and any subsequent elements to * the right ( increases their indices ) . The new elements will appear * in the list in the order that they are returned by the * specified Collection ' s iterator . * @ param index index at which to insert first element * from the specified collection . * @ param c elements to be inserted into this list . * @ return < tt > true < / tt > if this list changed as a result of the call . * @ throws IndexOutOfBoundsException if index out of range < tt > ( index * & lt ; 0 | | index & gt ; size ( ) ) < / tt > . * @ throws NullPointerException if the specified Collection is null . */ @ Override public boolean addAll ( int index , Collection < ? extends Long > c ) { } }
if ( index > size || index < 0 ) throw new IndexOutOfBoundsException ( "Index: " + index + ", Size: " + size ) ; int numNew = c . size ( ) ; ensureCapacity ( size + numNew ) ; // Increments modCount int numMoved = size - index ; if ( numMoved > 0 ) System . arraycopy ( elementData , index , elementData , index + numNew , numMoved ) ; Iterator < ? extends Long > iter = c . iterator ( ) ; int pos = index ; while ( iter . hasNext ( ) ) elementData [ pos ++ ] = iter . next ( ) ; size += numNew ; return numNew != 0 ;
public class ActionButton { /** * Initializes the stroke color * @ param attrs attributes of the XML tag that is inflating the view */ private void initStrokeColor ( TypedArray attrs ) { } }
int index = R . styleable . ActionButton_stroke_color ; if ( attrs . hasValue ( index ) ) { strokeColor = attrs . getColor ( index , strokeColor ) ; LOGGER . trace ( "Initialized Action Button stroke color: {}" , getStrokeColor ( ) ) ; }
public class StringParser { /** * Parse the given { @ link Object } as short with the specified radix . * @ param aObject * The object to parse . May be < code > null < / code > . * @ param nRadix * The radix to use . Must be & ge ; { @ link Character # MIN _ RADIX } and & le ; * { @ link Character # MAX _ RADIX } . * @ param nDefault * The default value to be returned if the passed object could not be * converted to a valid value . * @ return The default value if the object does not represent a valid value . */ public static short parseShort ( @ Nullable final Object aObject , @ Nonnegative final int nRadix , final short nDefault ) { } }
if ( aObject == null ) return nDefault ; if ( aObject instanceof Number ) return ( ( Number ) aObject ) . shortValue ( ) ; return parseShort ( aObject . toString ( ) , nRadix , nDefault ) ;
public class RendererUtils { /** * Convenient utility method that returns the currently selected values of * a UISelectMany component as a Set , of which the contains method can then be * easily used to determine if a value is currently selected . * Calling the contains method of this Set with the item value * as argument returns true if this item is selected . * @ param uiSelectMany * @ return Set containing all currently selected values */ public static Set getSelectedValuesAsSet ( FacesContext context , UIComponent component , Converter converter , UISelectMany uiSelectMany ) { } }
Object selectedValues = uiSelectMany . getValue ( ) ; return internalSubmittedOrSelectedValuesAsSet ( context , component , converter , uiSelectMany , selectedValues , true ) ;
public class AmazonRoute53DomainsClient { /** * This operation configures Amazon Route 53 to automatically renew the specified domain before the domain * registration expires . The cost of renewing your domain registration is billed to your AWS account . * The period during which you can renew a domain name varies by TLD . For a list of TLDs and their renewal policies , * see < a href = " http : / / wiki . gandi . net / en / domains / renew # renewal _ restoration _ and _ deletion _ times " > * " Renewal , restoration , and deletion times " < / a > on the website for our registrar associate , Gandi . Amazon Route 53 * requires that you renew before the end of the renewal period that is listed on the Gandi website so we can * complete processing before the deadline . * @ param enableDomainAutoRenewRequest * @ return Result of the EnableDomainAutoRenew operation returned by the service . * @ throws InvalidInputException * The requested item is not acceptable . For example , for an OperationId it might refer to the ID of an * operation that is already completed . For a domain name , it might not be a valid domain name or belong to * the requester account . * @ throws UnsupportedTLDException * Amazon Route 53 does not support this top - level domain ( TLD ) . * @ throws TLDRulesViolationException * The top - level domain does not support this operation . * @ sample AmazonRoute53Domains . EnableDomainAutoRenew * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53domains - 2014-05-15 / EnableDomainAutoRenew " * target = " _ top " > AWS API Documentation < / a > */ @ Override public EnableDomainAutoRenewResult enableDomainAutoRenew ( EnableDomainAutoRenewRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeEnableDomainAutoRenew ( request ) ;
public class ResourceXMLParser { /** * Parse the DOM attributes as properties for the particular entity node type * @ param ent Entity object * @ param node entity DOM node * @ throws ResourceXMLParserException if the DOM node is an unexpected tag name */ private void parseEntProperties ( final Entity ent , final Node node ) throws ResourceXMLParserException { } }
if ( null == entityProperties . get ( node . getName ( ) ) ) { throw new ResourceXMLParserException ( "Unexpected entity declaration: " + node . getName ( ) + ": " + reportNodeErrorLocation ( node ) ) ; } final Element node1 = ( Element ) node ; // load all element attributes as properties for ( final Object o : node1 . attributes ( ) ) { final Attribute attr = ( Attribute ) o ; ent . properties . setProperty ( attr . getName ( ) , attr . getStringValue ( ) ) ; }
public class CommerceUserSegmentCriterionLocalServiceBaseImpl { /** * Performs a dynamic query on the database and returns the matching rows . * @ param dynamicQuery the dynamic query * @ return the matching rows */ @ Override public < T > List < T > dynamicQuery ( DynamicQuery dynamicQuery ) { } }
return commerceUserSegmentCriterionPersistence . findWithDynamicQuery ( dynamicQuery ) ;
public class SignatureHelper { /** * Return the type name of the given ASM type . * @ param t * The ASM type . * @ return The type name . */ public static String getType ( final Type t ) { } }
switch ( t . getSort ( ) ) { case Type . ARRAY : return getType ( t . getElementType ( ) ) ; default : return t . getClassName ( ) ; }
public class GeneratedDOAuth2UserDaoImpl { /** * query - by method for field profileLink * @ param profileLink the specified attribute * @ return an Iterable of DOAuth2Users for the specified profileLink */ public Iterable < DOAuth2User > queryByProfileLink ( java . lang . String profileLink ) { } }
return queryByField ( null , DOAuth2UserMapper . Field . PROFILELINK . getFieldName ( ) , profileLink ) ;
public class TypeToStringUtils { /** * Print class with generic variables . For example , { @ code List < T > } . * @ param type class to print * @ return string containing class and it ' s declared generics */ public static String toStringWithNamedGenerics ( final Class < ? > type ) { } }
return toStringType ( new ParameterizedTypeImpl ( type , type . getTypeParameters ( ) ) , new PrintableGenericsMap ( ) ) ;
public class GVRGenericConstraint {

    /**
     * Sets the upper limits for the "moving" body translation relative to the
     * joint point, delegating to the native constraint implementation.
     *
     * @param limitX the X upper translation limit
     * @param limitY the Y upper translation limit
     * @param limitZ the Z upper translation limit
     */
    public void setLinearUpperLimits ( float limitX , float limitY , float limitZ ) {
        Native3DGenericConstraint . setLinearUpperLimits ( getNative ( ) , limitX , limitY , limitZ ) ;
    }
}
public class BaseAPI { /** * 通用get请求 * @ param url 地址 , 其中token用 # 代替 * @ return 请求结果 */ protected BaseResponse executeGet ( String url ) { } }
BaseResponse response ; BeanUtil . requireNonNull ( url , "url is null" ) ; // 需要传token String getUrl = url . replace ( "#" , config . getAccessToken ( ) ) ; response = NetWorkCenter . get ( getUrl ) ; return response ;
public class RuntimeConfiguration { /** * Given an array of parser specifications , it returns the corresponding list of parsers ( only * the correct specifications are put in the list . * @ param specs the parser specifications ( they will be parsed using { @ link ObjectParser } . * @ return a list of parsers built according to the specifications ( only the parseable items are put in the list ) . */ public static ArrayList < Parser < ? > > parsersFromSpecs ( String [ ] specs ) throws IllegalArgumentException , ClassNotFoundException , IllegalAccessException , InvocationTargetException , InstantiationException , NoSuchMethodException , IOException { } }
final ArrayList < Parser < ? > > parsers = new ArrayList < > ( ) ; for ( final String spec : specs ) parsers . add ( ObjectParser . fromSpec ( spec , Parser . class , new String [ ] { "it.unimi.di.law.bubing.parser" } ) ) ; return parsers ;
public class ShowNextSlideCommand { /** * { @ inheritDoc } */ @ Override protected void perform ( final Wave wave ) { } }
if ( getModel ( SlideStackModel . class ) . isReadyForSlideUpdate ( false ) ) { getModel ( SlideStackModel . class ) . next ( wave . get ( PrezWaves . SKIP_SLIDE_STEP ) ) ; }
public class AzureClient { /** * Given a polling state representing state of a PUT or PATCH operation , this method returns { @ link Single } object , * when subscribed to it , a single poll will be performed and emits the latest polling state . A poll will be * performed only if the current polling state is not in terminal state . * Note : this method does not implicitly introduce concurrency , by default the deferred action will be executed * in scheduler ( if any ) set for the provided observable . * @ param pollingState the current polling state * @ param < T > the type of the resource * @ param resourceType the java . lang . reflect . Type of the resource . * @ return the observable of which a subscription will lead single polling action . */ private < T > Single < PollingState < T > > pollPutOrPatchSingleAsync ( final PollingState < T > pollingState , final Type resourceType ) { } }
// Single poll step for a long-running PUT/PATCH operation:
//  - terminal + succeeded but resource not yet populated -> issue one GET to fetch the resource;
//  - terminal otherwise -> emit the current state unchanged;
//  - non-terminal -> poll once, fail fast on error states, then optionally fetch the resource.
// NOTE(review): inside the flatMap the success/resource checks read the outer `pollingState`
// rather than the freshly-polled `tPollingState`; this is only correct if both refer to the
// same mutable object updated in place — confirm, otherwise the check uses stale state.
pollingState . withResourceType ( resourceType ) ; pollingState . withSerializerAdapter ( restClient ( ) . serializerAdapter ( ) ) ; if ( pollingState . isStatusTerminal ( ) ) { if ( pollingState . isStatusSucceeded ( ) && pollingState . resource ( ) == null ) { return updateStateFromGetResourceOperationAsync ( pollingState , pollingState . putOrPatchResourceUri ( ) ) . toSingle ( ) ; } return Single . just ( pollingState ) ; } return putOrPatchPollingDispatcher ( pollingState , pollingState . putOrPatchResourceUri ( ) ) . map ( new Func1 < PollingState < T > , PollingState < T > > ( ) { @ Override public PollingState < T > call ( PollingState < T > tPollingState ) { tPollingState . throwCloudExceptionIfInFailedState ( ) ; return tPollingState ; } } ) . flatMap ( new Func1 < PollingState < T > , Observable < PollingState < T > > > ( ) { @ Override public Observable < PollingState < T > > call ( PollingState < T > tPollingState ) { if ( pollingState . isStatusSucceeded ( ) && pollingState . resource ( ) == null ) { return updateStateFromGetResourceOperationAsync ( pollingState , pollingState . putOrPatchResourceUri ( ) ) ; } return Observable . just ( tPollingState ) ; } } ) . toSingle ( ) ;
public class ServiceTools { /** * Get template for unknown type ( class or parameterizedType ) * @ param type * @ param jsonUnmarshaller * @ return */ String _getTemplateOfType ( Type type , IJsonMarshaller jsonMarshaller ) { } }
// Parameterized types are delegated to the specialized handler; plain classes are
// instantiated and marshalled to JSON. The empty catch is a deliberate best-effort
// fallback: when marshalling fails, the lower-cased simple class name is returned
// as the template placeholder instead of propagating the error.
if ( ParameterizedType . class . isAssignableFrom ( type . getClass ( ) ) ) { return getTemplateOfParameterizedType ( ( ParameterizedType ) type , jsonMarshaller ) ; } try { return jsonMarshaller . toJson ( getInstanceOfClass ( ( Class ) type ) ) ; } catch ( JsonMarshallingException ex ) { } return ( ( Class ) type ) . getSimpleName ( ) . toLowerCase ( Locale . ENGLISH ) ;
// Dispatches the event to every registered listener that matches it; getListeners
// performs the event-to-listener selection. Raw types are suppressed because the
// heterogeneous listener list cannot be statically matched to the event's type.
public class DefaultEventMulticaster { /** * { @ inheritDoc } */ @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) public void multicast ( Event e ) { List < EventListener < ? > > adapted = getListeners ( e ) ; for ( EventListener listener : adapted ) listener . onEvent ( e ) ;
public class CSP2SourceList { /** * Add the provided Base64 encoded hash value . The { @ value # HASH _ PREFIX } and * { @ link # HASH _ SUFFIX } are added automatically . * @ param eMDAlgo * The message digest algorithm used . May only * { @ link EMessageDigestAlgorithm # SHA _ 256 } , * { @ link EMessageDigestAlgorithm # SHA _ 384 } or * { @ link EMessageDigestAlgorithm # SHA _ 512 } . May not be < code > null < / code > . * @ param sHashBase64Value * The Base64 encoded hash value * @ return this for chaining */ @ Nonnull public CSP2SourceList addHash ( @ Nonnull final EMessageDigestAlgorithm eMDAlgo , @ Nonnull final String sHashBase64Value ) { } }
ValueEnforcer . notNull ( eMDAlgo , "MDAlgo" ) ; ValueEnforcer . notEmpty ( sHashBase64Value , "HashBase64Value" ) ; String sAlgorithmName ; switch ( eMDAlgo ) { case SHA_256 : sAlgorithmName = "sha256" ; break ; case SHA_384 : sAlgorithmName = "sha384" ; break ; case SHA_512 : sAlgorithmName = "sha512" ; break ; default : throw new IllegalArgumentException ( "Only SHA256, SHA384 and SHA512 are supported algorithms" ) ; } m_aList . add ( HASH_PREFIX + sAlgorithmName + "-" + sHashBase64Value + HASH_SUFFIX ) ; return this ;
public class ExpressionUtil { /** * Create a value expression . * @ param pageContext the context in which the expression will be parsed * @ param expression the expression * @ param expectedType the expected type of result * @ return a parsed expression */ public static ValueExpression createValueExpression ( PageContext pageContext , String expression , Class < ? > expectedType ) { } }
ExpressionFactory factory = getExpressionFactory ( pageContext ) ; return factory . createValueExpression ( pageContext . getELContext ( ) , expression , expectedType ) ;
public class Chart { /** * Sets the theme for this chart by specifying a reference to a javascript * file containing the theme . The javascript file must contain the following * code : < pre > < code > * Highcharts . setOptions ( myOptions ) ; * < / code > < / pre > where < code > myOptions < / code > is a JSON representation of * the theme options . * A theme can only be set via one setTheme method . An * { @ link IllegalStateException } will be thrown if you call two setTheme * methods . * @ see < a * href = " http : / / www . highcharts . com / demo " > http : / / www . highcharts . com / demo < / a > * @ param theme * reference to a javascript file containing a theme . */ public void setTheme ( final JavascriptResourceReference theme ) { } }
if ( this . theme != null || this . themeUrl != null ) { throw new IllegalStateException ( "A theme can only be defined once. Calling different setTheme methods is not allowed!" ) ; } this . themeReference = theme ;
public class Strman { /** * Verifies that one or more of needles are contained in value . * @ param value input * @ param needles needles to search * @ param caseSensitive true or false * @ return boolean true if any needle is found else false */ public static boolean containsAny ( final String value , final String [ ] needles , final boolean caseSensitive ) { } }
validate ( value , NULL_STRING_PREDICATE , NULL_STRING_MSG_SUPPLIER ) ; return Arrays . stream ( needles ) . anyMatch ( needle -> contains ( value , needle , caseSensitive ) ) ;
public class ServiceWrapper { /** * newFromPushedData - creating new SW for next dispatch / Runnable . * Xferred context data and classloaders to new SW . */ static ServiceWrapper newFromPushedData ( AsyncContextImpl asynContext ) { } }
// Clones the context's current wrapper for the next async dispatch: a fresh
// ServiceWrapper is created and the original's context data and both class
// loaders (originalCL / newCL) are copied over. The FINEST-level trace blocks
// record both wrappers and the transferred class loaders for diagnostics.
// PM90834 : added method ServiceWrapper orginalServiceWrapper = asynContext . serviceWrapper ; ServiceWrapper sw = new ServiceWrapper ( asynContext ) ; sw . setContextData ( orginalServiceWrapper . getContextData ( ) ) ; sw . originalCL = orginalServiceWrapper . originalCL ; sw . newCL = orginalServiceWrapper . newCL ; // PI92334 newCL is the current thread ' s classLoader if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINEST ) ) { logger . logp ( Level . FINEST , CLASS_NAME , "newFromPushedData" , "created ServiceWrapper [" + sw + "] , original ServiceWrapper [" + orginalServiceWrapper + "]" ) ; logger . logp ( Level . FINEST , CLASS_NAME , "newFromPushedData" , "transferred to new ServiceWrapper, originalCL [" + sw . originalCL + "] , newCL [" + sw . newCL + "]" ) ; } return sw ;
public class BitConverter { /** * Initialize this converter . * @ param converter The field to set the bit in ( Should be a NumberField ) . * @ param iBitNumber The bit number to set with this converter ( 0 = L . O . bit ) . * @ param trueIfMatch Return true if this bit is on ( if this variable is true ) . */ public void init ( Converter converter , int iBitNumber , boolean trueIfMatch , boolean bTrueIfNull ) { } }
// Chains to the base converter init, then records which bit this converter
// targets and how a set bit / null value should be interpreted.
super . init ( converter ) ; m_iBitNumber = iBitNumber ; m_bTrueIfMatch = trueIfMatch ; m_bTrueIfNull = bTrueIfNull ;
public class Primitives { /** * Creates a new stream of { @ link Byte } type that contains all items of a given array . * @ param bytes an array to get items from . * @ return a new stream of { @ link Byte } type that contains all items of a given array . */ public static Stream < Byte > box ( final byte [ ] bytes ) { } }
// Lazily boxes the primitive array: the FixedSizeStream pulls bytes[index] on
// demand, so the array is captured by reference and must not be mutated while
// the returned stream is being consumed.
return new FixedSizeStream < > ( bytes . length , new Func1 < Integer , Byte > ( ) { @ Override public Byte call ( Integer index ) { return bytes [ index ] ; } } ) ;
public class CsvFiles { /** * Writes the CSV data located in { @ code csvData } to the file located at * { @ code fileName } . * @ param csvData the CSV data including the header * @ param fileName the file to write the CSV data to * @ throws IOException if there was an error writing to the file * @ throws NullPointerException if { @ code csvData = = null } or { @ code fileName = = null } */ public static void writeCsv ( List < String [ ] > csvData , String fileName ) throws IOException { } }
Preconditions . checkNotNull ( csvData , "Null CSV data" ) ; Preconditions . checkNotNull ( fileName , "Null file name" ) ; CSVWriter writer = null ; try { writer = new CSVWriter ( Files . newWriter ( new File ( fileName ) , StandardCharsets . UTF_8 ) ) ; for ( String [ ] line : csvData ) { writer . writeNext ( line ) ; } } finally { if ( writer != null ) { writer . close ( ) ; } }
public class CommerceNotificationTemplateUserSegmentRelUtil { /** * Returns the first commerce notification template user segment rel in the ordered set where commerceUserSegmentEntryId = & # 63 ; . * @ param commerceUserSegmentEntryId the commerce user segment entry ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce notification template user segment rel * @ throws NoSuchNotificationTemplateUserSegmentRelException if a matching commerce notification template user segment rel could not be found */ public static CommerceNotificationTemplateUserSegmentRel findByCommerceUserSegmentEntryId_First ( long commerceUserSegmentEntryId , OrderByComparator < CommerceNotificationTemplateUserSegmentRel > orderByComparator ) throws com . liferay . commerce . notification . exception . NoSuchNotificationTemplateUserSegmentRelException { } }
// Generated service-util facade: delegates directly to the persistence layer.
return getPersistence ( ) . findByCommerceUserSegmentEntryId_First ( commerceUserSegmentEntryId , orderByComparator ) ;
public class PippoSettings { /** * Recursively read " include " properties files . * " Include " properties are the base properties which are overwritten by * the provided properties . * @ param baseDir * @ param properties * @ return the merged properties * @ throws IOException */ private Properties loadIncludes ( File baseDir , Properties properties ) throws IOException { } }
// Delegates to the generic loader keyed on the "include" property; the final
// boolean flag's meaning is defined by loadProperties (false here).
return loadProperties ( baseDir , properties , "include" , false ) ;
public class AbstractIoSession { /** * TODO Add method documentation */ public final void decreaseReadBufferSize ( ) { } }
if ( deferDecreaseReadBuffer ) { deferDecreaseReadBuffer = false ; return ; } if ( getConfig ( ) . getReadBufferSize ( ) > getConfig ( ) . getMinReadBufferSize ( ) ) { getConfig ( ) . setReadBufferSize ( getConfig ( ) . getReadBufferSize ( ) >>> 1 ) ; } deferDecreaseReadBuffer = true ;
public class ConcurrencyMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Concurrency concurrency , ProtocolMarshaller protocolMarshaller ) { } }
// Standard AWS SDK marshaller shape: reject null input, bind the single field,
// and wrap any marshalling failure in an SdkClientException with the cause.
if ( concurrency == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( concurrency . getReservedConcurrentExecutions ( ) , RESERVEDCONCURRENTEXECUTIONS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ShrinkWrap {
    /**
     * Creates a new archive of the specified type, backed by the default
     * {@link Configuration} of the default domain's {@link ArchiveFactory}.
     * A random name with the proper extension is generated for the archive.
     * Functionally equivalent to {@code ShrinkWrap.getDefaultDomain().getArchiveFactory().create(type)}.
     *
     * @param type the type of the archive e.g. {@link WebArchive}
     * @return an {@link Assignable} archive base
     * @throws IllegalArgumentException if type is not specified
     * @throws UnknownExtensionTypeException if no extension mapping is found for the specified type
     */
    public static < T extends Assignable > T create ( final Class < T > type ) throws IllegalArgumentException , UnknownExtensionTypeException {
        if ( type == null ) {
            throw new IllegalArgumentException ( "Type must be specified" ) ;
        }
        // Resolve the default domain's factory, then delegate creation to it.
        final ArchiveFactory factory = ShrinkWrap . getDefaultDomain ( ) . getArchiveFactory ( ) ;
        return factory . create ( type ) ;
    }
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcPolygonalBoundedHalfSpace ( ) { } }
// EMF-generated lazy lookup: resolves the EClass once from the registered
// Ifc4 package (classifier index 435) and caches it. Do not hand-edit; the
// index is maintained by the code generator.
if ( ifcPolygonalBoundedHalfSpaceEClass == null ) { ifcPolygonalBoundedHalfSpaceEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 435 ) ; } return ifcPolygonalBoundedHalfSpaceEClass ;
public class DetectCircleHexagonalGrid { /** * Puts the grid into a canonical orientation */ @ Override protected void putGridIntoCanonical ( Grid g ) { } }
// Canonicalization proceeds in two phases: first force a plausible layout
// (rotate so column count matches, reverse if the (0,0) cell is absent), then
// disambiguate among the remaining valid orientations. The branch taken depends
// on the parity of rows/columns, which determines how many symmetric solutions
// exist; flips enforce the counter-clockwise orientation constraint and
// closestCorner4 picks the preferred corner. Statement order is significant —
// each operation assumes the normalization done by the previous ones.
// first put it into a plausible solution if ( g . columns != numCols ) { rotateGridCCW ( g ) ; } if ( g . get ( 0 , 0 ) == null ) { reverse ( g ) ; } // select the best corner for canonical if ( g . columns % 2 == 1 && g . rows % 2 == 1 ) { // first make sure orientation constraint is maintained if ( isClockWise ( g ) ) { flipHorizontal ( g ) ; } int numRotationsCCW = closestCorner4 ( g ) ; if ( g . columns == g . rows ) { for ( int i = 0 ; i < numRotationsCCW ; i ++ ) { rotateGridCCW ( g ) ; } } else if ( numRotationsCCW == 2 ) { // only two valid solutions . rotate only if the other valid solution is better rotateGridCCW ( g ) ; rotateGridCCW ( g ) ; } } else if ( g . columns % 2 == 1 ) { // only two solutions . Go with the one which maintains orientation constraint if ( isClockWise ( g ) ) { flipHorizontal ( g ) ; } } else if ( g . rows % 2 == 1 ) { // only two solutions . Go with the one which maintains orientation constraint if ( isClockWise ( g ) ) { flipVertical ( g ) ; } }
public class TrainingsImpl { /** * Get images by id for a given project iteration . * This API will return a set of Images for the specified tags and optionally iteration . If no iteration is specified the * current workspace is used . * @ param projectId The project id * @ param getImagesByIdsOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the List & lt ; Image & gt ; object if successful . */ public List < Image > getImagesByIds ( UUID projectId , GetImagesByIdsOptionalParameter getImagesByIdsOptionalParameter ) { } }
// Synchronous facade: blocks on the async service call and unwraps the body.
return getImagesByIdsWithServiceResponseAsync ( projectId , getImagesByIdsOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class KerasEmbedding { /** * Get Keras input dimension from Keras layer configuration . * @ param layerConfig dictionary containing Keras layer configuration * @ return input dim as int */ private int getInputDimFromConfig ( Map < String , Object > layerConfig ) throws InvalidKerasConfigurationException { } }
Map < String , Object > innerConfig = KerasLayerUtils . getInnerLayerConfigFromConfig ( layerConfig , conf ) ; if ( ! innerConfig . containsKey ( conf . getLAYER_FIELD_INPUT_DIM ( ) ) ) throw new InvalidKerasConfigurationException ( "Keras Embedding layer config missing " + conf . getLAYER_FIELD_INPUT_DIM ( ) + " field" ) ; return ( int ) innerConfig . get ( conf . getLAYER_FIELD_INPUT_DIM ( ) ) ;
public class NetworkCache { /** * Writes an InputStream from a network response to a temporary file . If the file successfully parses * to an composition , { @ link # renameTempFile ( FileExtension ) } should be called to move the file * to its final location for future cache hits . */ File writeTempCacheFile ( InputStream stream , FileExtension extension ) throws IOException { } }
// Copies the response stream into a temp file under the app cache dir in 1 KiB
// chunks. Nested try/finally guarantees both streams close even on failure; the
// noinspection suppression indicates try-with-resources is deliberately avoided
// here — presumably for older Android API levels, TODO confirm before changing.
String fileName = filenameForUrl ( url , extension , true ) ; File file = new File ( appContext . getCacheDir ( ) , fileName ) ; try { OutputStream output = new FileOutputStream ( file ) ; // noinspection TryFinallyCanBeTryWithResources try { byte [ ] buffer = new byte [ 1024 ] ; int read ; while ( ( read = stream . read ( buffer ) ) != - 1 ) { output . write ( buffer , 0 , read ) ; } output . flush ( ) ; } finally { output . close ( ) ; } } finally { stream . close ( ) ; } return file ;
public class JFrmMain { /** * Exit the Application */ private void exitForm ( java . awt . event . WindowEvent evt ) { } }
// Persists the main window geometry (size and position) to the application
// properties, flushes them to disk, then terminates the JVM. System.exit(0)
// means no code after this handler runs.
// GEN - FIRST : event _ exitForm Main . getProperties ( ) . setProperty ( Main . PROPERTY_MAINFRAME_HEIGHT , "" + this . getHeight ( ) ) ; Main . getProperties ( ) . setProperty ( Main . PROPERTY_MAINFRAME_WIDTH , "" + this . getWidth ( ) ) ; Main . getProperties ( ) . setProperty ( Main . PROPERTY_MAINFRAME_POSX , "" + this . getBounds ( ) . x ) ; Main . getProperties ( ) . setProperty ( Main . PROPERTY_MAINFRAME_POSY , "" + this . getBounds ( ) . y ) ; Main . getProperties ( ) . storeProperties ( "" ) ; System . exit ( 0 ) ;
public class AccumulatorManager { /** * Cleanup data for the oldest jobs if the maximum number of entries is * reached . */ private void cleanup ( JobID jobId ) { } }
// Tracks jobs in a bounded recency list: unseen jobs are prepended, and when the
// list exceeds maxEntries the tail entry's accumulators are evicted.
// NOTE(review): a job already present is NOT moved to the front, so this is
// insertion-ordered rather than strict LRU; also lru.contains is O(n) — confirm
// whether that is intentional for the expected list size.
if ( ! lru . contains ( jobId ) ) { lru . addFirst ( jobId ) ; } if ( lru . size ( ) > this . maxEntries ) { JobID toRemove = lru . removeLast ( ) ; this . jobAccumulators . remove ( toRemove ) ; }
public class ProcessKqueue { private void checkStdinCloses ( ) { } }
// Atomically drains all processes queued for stdin closure and closes each
// one's stdin; drainTo avoids racing with concurrent producers on the queue.
List < OsxProcess > processes = new ArrayList < > ( ) ; // drainTo ( ) is known to be atomic for ArrayBlockingQueue closeQueue . drainTo ( processes ) ; for ( OsxProcess process : processes ) { process . getStdin ( ) . close ( ) ; }
public class CacheSimpleConfig { /** * Set classname of a class to be used as { @ link javax . cache . integration . CacheLoader } . * @ param cacheLoader classname to be used as { @ link javax . cache . integration . CacheLoader } * @ return the current cache config instance */ public CacheSimpleConfig setCacheLoader ( String cacheLoader ) { } }
// cacheLoader and cacheLoaderFactory are mutually exclusive: fail fast if the
// factory is already configured (a null argument is always accepted).
if ( cacheLoader != null && cacheLoaderFactory != null ) { throw new IllegalStateException ( "Cannot set cacheLoader to '" + cacheLoader + "', because cacheLoaderFactory is already set to '" + cacheLoaderFactory + "'." ) ; } this . cacheLoader = cacheLoader ; return this ;
public class OutputPanel { /** * Appends the given { @ code message } to the panel , asynchronously in the EDT . * @ param message the message to append to the output panel * @ since 2.5.0 * @ see EventQueue # invokeLater ( Runnable ) */ public void appendAsync ( final String message ) { } }
EventQueue . invokeLater ( new Runnable ( ) { @ Override public void run ( ) { doAppend ( message ) ; } } ) ;