signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class FieldCode { /** * To code . * @ return the string */ public String toCode ( ) { } }
String ret = "" ; if ( StringUtils . isEmpty ( scope ) ) { ret = "private " ; } else { ret = scope + ClassCode . BLANK_STRING ; } ret += type + ClassCode . BLANK_STRING + name ; if ( ! StringUtils . isEmpty ( defaultValue ) ) { ret += "=" + defaultValue ; } return ret ;
public class CmsADECache { /** * Removes the container page identified by its structure id from the cache . < p > * @ param structureId the container page ' s structure id * @ param online if online or offline */ public void uncacheContainerPage ( CmsUUID structureId , boolean online ) { } }
try { m_lock . writeLock ( ) . lock ( ) ; if ( online ) { m_containerPagesOnline . remove ( getCacheKey ( structureId , true ) ) ; m_containerPagesOnline . remove ( getCacheKey ( structureId , false ) ) ; } else { m_containerPagesOffline . remove ( getCacheKey ( structureId , true ) ) ; m_containerPagesOffline . remove ( getCacheKey ( structureId , false ) ) ; } } finally { m_lock . writeLock ( ) . unlock ( ) ; }
public class diff_match_patch {
    /**
     * Add some padding on text start and end so that edges can match something.
     * Intended to be called only from within patch_apply.
     *
     * The padding consists of the characters with char codes 1..Patch_Margin,
     * which are not expected to occur in ordinary text. Every patch is shifted
     * forward by the padding length, and the first/last patches are widened so
     * their edge context covers the padding.
     *
     * NOTE(review): assumes {@code patches} is non-empty — getFirst()/getLast()
     * would throw NoSuchElementException otherwise; confirm patch_apply
     * guarantees this.
     *
     * @param patches Array of Patch objects (modified in place).
     * @return The padding string added to each side.
     */
    public String patch_addPadding(LinkedList<Patch> patches) {
        short paddingLength = Patch_Margin;
        // Build the padding from char codes 1..paddingLength.
        String nullPadding = "";
        for (short x = 1; x <= paddingLength; x++) {
            nullPadding += String.valueOf((char) x);
        }

        // Bump all the patches forward.
        for (Patch aPatch : patches) {
            aPatch.start1 += paddingLength;
            aPatch.start2 += paddingLength;
        }

        // Add some padding on start of first diff.
        Patch patch = patches.getFirst();
        LinkedList<Diff> diffs = patch.diffs;
        if (diffs.isEmpty() || diffs.getFirst().operation != Operation.EQUAL) {
            // Add nullPadding equality.
            diffs.addFirst(new Diff(Operation.EQUAL, nullPadding));
            patch.start1 -= paddingLength; // Should be 0.
            patch.start2 -= paddingLength; // Should be 0.
            patch.length1 += paddingLength;
            patch.length2 += paddingLength;
        } else if (paddingLength > diffs.getFirst().text.length()) {
            // Grow first equality: prepend just enough padding so the leading
            // equality is at least paddingLength characters long.
            Diff firstDiff = diffs.getFirst();
            int extraLength = paddingLength - firstDiff.text.length();
            firstDiff.text = nullPadding.substring(firstDiff.text.length()) + firstDiff.text;
            patch.start1 -= extraLength;
            patch.start2 -= extraLength;
            patch.length1 += extraLength;
            patch.length2 += extraLength;
        }

        // Add some padding on end of last diff.
        patch = patches.getLast();
        diffs = patch.diffs;
        if (diffs.isEmpty() || diffs.getLast().operation != Operation.EQUAL) {
            // Add nullPadding equality.
            diffs.addLast(new Diff(Operation.EQUAL, nullPadding));
            patch.length1 += paddingLength;
            patch.length2 += paddingLength;
        } else if (paddingLength > diffs.getLast().text.length()) {
            // Grow last equality: append just enough padding so the trailing
            // equality is at least paddingLength characters long.
            Diff lastDiff = diffs.getLast();
            int extraLength = paddingLength - lastDiff.text.length();
            lastDiff.text += nullPadding.substring(0, extraLength);
            patch.length1 += extraLength;
            patch.length2 += extraLength;
        }

        return nullPadding;
    }
}
public class NettyClientHandler { /** * Gets the client stream associated to the given HTTP / 2 stream object . */ private NettyClientStream . TransportState clientStream ( Http2Stream stream ) { } }
return stream == null ? null : ( NettyClientStream . TransportState ) stream . getProperty ( streamKey ) ;
public class ParameterReplacer {
    /**
     * ----- JDBC 3.0 -----
     *
     * Records a {@code setURL(int, URL)} invocation so it can later be replayed
     * against a real {@link PreparedStatement}.
     *
     * @param parameterIndex the 1-based JDBC parameter index
     * @param x the URL value bound to the parameter
     */
    public void setURL(int parameterIndex, java.net.URL x) {
        // NOTE(review): parameterIndex is intentionally passed twice — once as
        // the record slot/key and once as the first replayed argument of
        // setURL(int, URL). Confirm against record()'s signature.
        record(parameterIndex, getDeclaredMethod(PreparedStatement.class, "setURL", int.class, URL.class), parameterIndex, x);
    }
}
public class TypedScopeCreator { /** * Returns the { @ link Module } corresponding to this scope root , or null if not a module root . */ @ Nullable private Module getModuleFromScopeRoot ( Node moduleBody ) { } }
if ( moduleBody . isModuleBody ( ) ) { // TODO ( b / 128633181 ) : handle ES modules here Node scriptNode = moduleBody . getParent ( ) ; checkState ( scriptNode . getBooleanProp ( Node . GOOG_MODULE ) , "Typechecking of non-goog-modules not supported" ) ; Node googModuleCall = moduleBody . getFirstChild ( ) ; String namespace = googModuleCall . getFirstChild ( ) . getSecondChild ( ) . getString ( ) ; return moduleMap . getClosureModule ( namespace ) ; } else if ( isGoogLoadModuleBlock ( moduleBody ) ) { Node googModuleCall = moduleBody . getFirstChild ( ) ; String namespace = googModuleCall . getFirstChild ( ) . getSecondChild ( ) . getString ( ) ; return moduleMap . getClosureModule ( namespace ) ; } return null ;
public class BasicPathFinder {
    /**
     * Runs a recursive depth-first search from a {@link KamNode} in search of
     * the <tt>target</tt> node. When a {@link SimplePath} is found to the
     * <tt>target</tt> the {@link Stack} of {@link KamEdge} is collected and
     * the algorithm continues.<br/><br/>
     * This depth-first search exhaustively walks the entire {@link Kam}
     * and finds all paths from <tt>source</tt> to <tt>target</tt>, bounded by
     * the configured {@code maxSearchDepth}.
     *
     * @param kam {@link Kam}, the kam to traverse
     * @param cnode {@link KamNode} the current node to evaluate
     * @param source {@link KamNode} the source to search from
     * @param targets {@link Set} of {@link KamNode}, the targets to search to
     * @param depth current recursion depth (incremented on entry)
     * @param nodeStack {@link Stack} of {@link KamNode} that holds the nodes
     * on the current path from the <tt>source</tt>
     * @param edgeStack {@link Stack} of {@link KamEdge} that holds the edges
     * on the current path from the <tt>source</tt>
     * @param pathResults the resulting paths from source to targets
     */
    private void runDepthFirstSearch(final Kam kam, final KamNode cnode,
            final KamNode source, final Set<KamNode> targets, int depth,
            final SetStack<KamNode> nodeStack, final SetStack<KamEdge> edgeStack,
            final List<SimplePath> pathResults) {
        depth += 1;
        // Bound the search: abandon this branch past the maximum depth.
        if (depth > maxSearchDepth) {
            return;
        }
        // get adjacent edges
        final Set<KamEdge> edges = kam.getAdjacentEdges(cnode, BOTH);
        for (final KamEdge edge : edges) {
            // NOTE(review): pushEdge presumably pushes the edge and its far
            // node, returning false when doing so would revisit the current
            // path — confirm against pushEdge's implementation.
            if (pushEdge(edge, nodeStack, edgeStack)) {
                final KamNode edgeOppositeNode = nodeStack.peek();
                // we have found a path from source to target
                if (targets.contains(edgeOppositeNode)) {
                    // Snapshot the current edge stack as a completed path.
                    final SimplePath newPath = new SimplePath(kam, source,
                            nodeStack.peek(), edgeStack.toStack());
                    pathResults.add(newPath);
                } else {
                    runDepthFirstSearch(kam, edgeOppositeNode, source, targets,
                            depth, nodeStack, edgeStack, pathResults);
                }
                // Backtrack: undo the push performed by pushEdge above.
                nodeStack.pop();
                edgeStack.pop();
            }
        }
    }
}
public class LexicaHelper { /** * TODO should not be static */ private static String getTokenDescPath ( AnalysisEngineDescription tokenDesc ) throws IOException , ResourceInitializationException , SAXException { } }
if ( tokenDescrPath != null ) return tokenDescrPath . getAbsolutePath ( ) ; tokenDescrPath = File . createTempFile ( "tmp" , "TokenizerAnnotator.xml" ) ; tokenDesc . toXML ( new FileWriter ( tokenDescrPath ) ) ; return tokenDescrPath . getAbsolutePath ( ) ;
public class StringTokenizer { /** * Returns the next token in this string tokenizer ' s string . First , * the set of characters considered to be delimiters by this * < tt > StringTokenizer < / tt > object is changed to be the characters in * the string < tt > delim < / tt > . Then the next token in the string * after the current position is returned . The current position is * advanced beyond the recognized token . The new delimiter set * remains the default after this call . * @ param delim the new delimiters . * @ return the next token , after switching to the new delimiter set . * @ exception NoSuchElementException if there are no more tokens in * this tokenizer ' s string . */ public String nextToken ( String delim ) { } }
m_delimiters_ = EMPTY_DELIMITER_ ; if ( delim != null && delim . length ( ) > 0 ) { m_delimiters_ = new UnicodeSet ( ) ; m_delimiters_ . addAll ( delim ) ; } return nextToken ( m_delimiters_ ) ;
public class ThriftClient {
    /**
     * Populate data.
     *
     * Converts raw Thrift {@code KeySlice} results into entity objects and
     * appends them to {@code entities}. Super-column families are delegated to
     * {@code findAll} with the extracted row keys; plain column families are
     * rebuilt row-by-row via the data handler.
     *
     * @param m the m (entity metadata for the target type)
     * @param keySlices the key slices (raw Thrift result rows)
     * @param entities the entities (accumulator, mutated and also returned)
     * @param isRelational the is relational
     * @param relationNames the relation names
     * @return the list (same instance as {@code entities})
     * @throws KunderaException wrapping any failure during population
     */
    private List populateData(EntityMetadata m, List<KeySlice> keySlices,
            List<Object> entities, boolean isRelational, List<String> relationNames) {
        try {
            if (m.getType().isSuperColumnFamilyMetadata()) {
                // Super column family: fetch complete entities by row key.
                List<Object> rowKeys = ThriftDataResultHelper.getRowKeys(keySlices, m);
                Object[] rowIds = rowKeys.toArray();
                entities.addAll(findAll(m.getEntityClazz(), null, rowIds));
            } else {
                for (KeySlice keySlice : keySlices) {
                    byte[] key = keySlice.getKey();
                    List<ColumnOrSuperColumn> coscList = keySlice.getColumns();
                    // Unwrap ColumnOrSuperColumn into plain columns.
                    List<Column> columns = ThriftDataResultHelper.transformThriftResult(coscList, ColumnFamilyType.COLUMN, null);
                    Object e = null;
                    // Reconstruct the typed entity id from the raw row key bytes.
                    Object id = PropertyAccessorHelper.getObject(m.getIdAttribute().getJavaType(), key);
                    e = dataHandler.populateEntity(
                            new ThriftRow(id, m.getTableName(), columns,
                                    new ArrayList<SuperColumn>(0),
                                    new ArrayList<CounterColumn>(0),
                                    new ArrayList<CounterSuperColumn>(0)),
                            m, KunderaCoreUtils.getEntity(e), relationNames, isRelational);
                    entities.add(e);
                }
            }
        } catch (Exception e) {
            log.error("Error while populating data for relations of column family {}, Caused by: .", m.getTableName(), e);
            throw new KunderaException(e);
        }
        return entities;
    }
}
public class ControlBeanContextSupport { /** * Analagous to < code > java . lang . ClassLoader . getResourceAsStream ( ) < / code > , * this method allows a < code > BeanContext < / code > implementation * to interpose behavior between the child < code > Component < / code > * and underlying < code > ClassLoader < / code > . * @ param name the resource name * @ param bcc the specified child * @ return an < code > InputStream < / code > for reading the resource , * or < code > null < / code > if the resource could not * be found . * @ throws IllegalArgumentException if the resource is not valid */ public InputStream getResourceAsStream ( String name , BeanContextChild bcc ) throws IllegalArgumentException { } }
// bcc must be a child of this context if ( ! contains ( bcc ) ) { throw new IllegalArgumentException ( "Child is not a member of this context" ) ; } ClassLoader cl = bcc . getClass ( ) . getClassLoader ( ) ; InputStream is ; if ( cl != null && ( is = cl . getResourceAsStream ( name ) ) != null ) { return is ; } return ClassLoader . getSystemResourceAsStream ( name ) ;
public class GraphHandler {
    /**
     * Writes the given byte array into a file.
     * This function logs an error but doesn't throw if it fails.
     * @param query The query being handled (for logging purposes).
     * @param path The path to write to.
     * @param contents The contents to write into the file.
     */
    private static void writeFile(final HttpQuery query, final String path,
            final byte[] contents) {
        try {
            final FileOutputStream out = new FileOutputStream(path);
            try {
                out.write(contents);
            } finally {
                // close() stays inside the outer try on purpose: an
                // IOException thrown while closing is routed to the outer
                // catch and logged as a write failure rather than swallowed.
                out.close();
            }
        } catch (FileNotFoundException e) {
            // FileNotFoundException must be caught before its superclass
            // IOException to distinguish "cannot create" from "cannot write".
            logError(query, "Failed to create file " + path, e);
        } catch (IOException e) {
            logError(query, "Failed to write file " + path, e);
        }
    }
}
public class AbstractMarkerLanguageParser {
    /**
     * Extract the validation components that are specific to the marker language.
     *
     * Parses {@code inputFile} via {@code transform} and delegates to
     * {@code getSpecificValidationComponents} with the parsed document.
     * NOTE(review): the {@code false} flag passed to transform is assumed to
     * disable some optional processing mode — confirm against transform's
     * declaration.
     *
     * @param inputFile the input file.
     * @param rootFolder the root folder in which the input file is located.
     * @param context the generation context.
     * @return the validation components.
     */
    public final List<DynamicValidationComponent> getMarkerSpecificValidationComponents(
            File inputFile, File rootFolder, DynamicValidationContext context) {
        return getSpecificValidationComponents(transform(inputFile, false), inputFile, rootFolder, context);
    }
}
public class ReverseMap { /** * Creates a reverse map name corresponding to an address contained in * an array of 4 integers between 0 and 255 ( for an IPv4 address ) or 16 * integers between 0 and 255 ( for an IPv6 address ) . * @ param addr The address from which to build a name . * @ return The name corresponding to the address in the reverse map . */ public static Name fromAddress ( int [ ] addr ) { } }
byte [ ] bytes = new byte [ addr . length ] ; for ( int i = 0 ; i < addr . length ; i ++ ) { if ( addr [ i ] < 0 || addr [ i ] > 0xFF ) throw new IllegalArgumentException ( "array must " + "contain values " + "between 0 and 255" ) ; bytes [ i ] = ( byte ) addr [ i ] ; } return fromAddress ( bytes ) ;
public class ObjectInputStream {
    /**
     * Reads the content of the receiver based on the previously read token
     * {@code tc}.
     *
     * Dispatches on the serialization type code to the matching read routine.
     * NOTE(review): the {@code false} passed to the readNew* helpers is
     * presumed to mean "not unshared" — confirm against those helpers.
     *
     * @param tc The token code for the next item in the stream
     * @return the object read from the stream
     * @throws IOException If an IO exception happened when reading the class
     *         descriptor.
     * @throws ClassNotFoundException If the class corresponding to the object
     *         being read could not be found.
     */
    private Object readContent(byte tc) throws ClassNotFoundException, IOException {
        switch (tc) {
            case TC_BLOCKDATA:
                return readBlockData();
            case TC_BLOCKDATALONG:
                return readBlockDataLong();
            case TC_CLASS:
                return readNewClass(false);
            case TC_CLASSDESC:
                return readNewClassDesc(false);
            case TC_ARRAY:
                return readNewArray(false);
            case TC_OBJECT:
                return readNewObject(false);
            case TC_STRING:
                return readNewString(false);
            case TC_LONGSTRING:
                return readNewLongString(false);
            case TC_REFERENCE:
                // Back-reference to an already-deserialized object.
                return readCyclicReference();
            case TC_NULL:
                return null;
            case TC_EXCEPTION:
                // The writer aborted mid-stream; surface the recorded cause.
                Exception exc = readException();
                throw new WriteAbortedException("Read an exception", exc);
            case TC_RESET:
                // Stream reset marker: clear state, yield no object.
                resetState();
                return null;
            default:
                // Unknown type code: the stream is corrupt.
                throw corruptStream(tc);
        }
    }
}
public class SVGAndroidRenderer { /** * Any unspecified fields in this pattern can be ' borrowed ' from another * pattern specified by the href attribute . */ private void fillInChainedPatternFields ( Pattern pattern , String href ) { } }
// Locate the referenced object SVG . SvgObject ref = pattern . document . resolveIRI ( href ) ; if ( ref == null ) { // Non - existent warn ( "Pattern reference '%s' not found" , href ) ; return ; } if ( ! ( ref instanceof Pattern ) ) { error ( "Pattern href attributes must point to other pattern elements" ) ; return ; } if ( ref == pattern ) { error ( "Circular reference in pattern href attribute '%s'" , href ) ; return ; } Pattern pRef = ( Pattern ) ref ; if ( pattern . patternUnitsAreUser == null ) pattern . patternUnitsAreUser = pRef . patternUnitsAreUser ; if ( pattern . patternContentUnitsAreUser == null ) pattern . patternContentUnitsAreUser = pRef . patternContentUnitsAreUser ; if ( pattern . patternTransform == null ) pattern . patternTransform = pRef . patternTransform ; if ( pattern . x == null ) pattern . x = pRef . x ; if ( pattern . y == null ) pattern . y = pRef . y ; if ( pattern . width == null ) pattern . width = pRef . width ; if ( pattern . height == null ) pattern . height = pRef . height ; // attributes from superclasses if ( pattern . children . isEmpty ( ) ) pattern . children = pRef . children ; if ( pattern . viewBox == null ) pattern . viewBox = pRef . viewBox ; if ( pattern . preserveAspectRatio == null ) { pattern . preserveAspectRatio = pRef . preserveAspectRatio ; } if ( pRef . href != null ) fillInChainedPatternFields ( pattern , pRef . href ) ;
public class Log4jUtil {
    /**
     * Re-initializes the Log4j2 logging configuration from the given
     * configuration file path.
     *
     * @param logConfigpath path to the Log4j2 configuration file
     */
    public static void reInitLogConfig(String logConfigpath) {
        try {
            LoggerContext context = (LoggerContext) LogManager.getContext(false);
            context.setConfigLocation(new File(logConfigpath).toURI());
            context.reconfigure(); // Re-initialize the Log4j2 configuration context.
            // NOTE(review): LoggerContext.setConfigLocation already triggers a
            // reconfigure, so the explicit reconfigure() above may be
            // redundant (though harmless) — confirm against the Log4j2 docs.
            Logger log = LoggerFactory.getLogger(Log4jUtil.class);
            log.info("日志配置重新初始化完成:" + logConfigpath);
        } catch (Exception e) {
            // Best-effort: failures are only printed, callers are not notified.
            e.printStackTrace();
        }
    }
}
public class QrCodeUtil { /** * 生成二维码到文件 , 二维码图片格式取决于文件的扩展名 * @ param content 文本内容 * @ param width 宽度 * @ param height 高度 * @ param targetFile 目标文件 , 扩展名决定输出格式 * @ return 目标文件 */ public static File generate ( String content , int width , int height , File targetFile ) { } }
final BufferedImage image = generate ( content , width , height ) ; ImgUtil . write ( image , targetFile ) ; return targetFile ;
public class OpenAPIUIBundlesUpdater { /** * Return as a string the contents of a file in the bundle . */ private static String getResource ( Bundle myBundle , String resourcePath ) { } }
if ( myBundle == null ) return null ; String bundleShortDescription = getBundleDescription ( myBundle ) ; StringBuilder responseString = new StringBuilder ( ) ; URL bundleResource = myBundle . getResource ( resourcePath ) ; if ( bundleResource != null ) { BufferedReader br = null ; try { // read the requested resource from the bundle br = new BufferedReader ( new InputStreamReader ( bundleResource . openConnection ( ) . getInputStream ( ) , "UTF-8" ) ) ; while ( br . ready ( ) ) { responseString . append ( br . readLine ( ) ) ; } br . close ( ) ; } catch ( Exception e ) { // shouldn ' t happen if ( OpenAPIUtils . isEventEnabled ( tc ) ) { Tr . event ( tc , "Exception trying to read resource at " + resourcePath + " from bundle " + bundleShortDescription ) ; } } } else { if ( OpenAPIUtils . isEventEnabled ( tc ) ) { Tr . event ( tc , "Unexpected error getting resource from WAB bundle." ) ; } } return responseString . toString ( ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link SingleOperationRefType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link SingleOperationRefType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "singleOperationRef" ) public JAXBElement < SingleOperationRefType > createSingleOperationRef ( SingleOperationRefType value ) { } }
return new JAXBElement < SingleOperationRefType > ( _SingleOperationRef_QNAME , SingleOperationRefType . class , null , value ) ;
public class AuditTaskProcessorFactory { /** * ( non - Javadoc ) * @ see * org . duracloud . mill . workman . TaskProcessorFactory # create ( org . duracloud . * common . queue . task . Task ) */ @ Override public final TaskProcessor create ( Task task ) throws TaskProcessorCreationFailedException { } }
if ( isSupported ( task ) ) { log . debug ( "creating task processor for " + task ) ; AuditTask auditTask = new AuditTask ( ) ; auditTask . readTask ( task ) ; return createImpl ( auditTask ) ; } throw new TaskProcessorCreationFailedException ( "Task is not an Audit task" ) ;
public class druidGParser { /** * druidG . g : 121:1 : insertRTStmnt returns [ RTInsertMeta iMeta ] : ( INSERT _ REALTIME WS INTO WS ( id = ID ) ( WS ) ? LPARAN ( WS ) ? selectItems [ iMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ iMeta ] ) * ( WS ) ? RPARAN ( WS ) ? ) VALUES ( WS ) ? LPARAN ( WS ) ? ( a = anyValue ) ( ( WS ) ? ' , ' ( WS ) ? a = anyValue ) * ( WS ) ? RPARAN ( WS ) ? ( WHERE WS i = intervalClause ) ? ( WS BREAK WS BY WS gran = SINGLE _ QUOTE _ STRING ) ? ; */ public final RTInsertMeta insertRTStmnt ( ) throws RecognitionException { } }
// ANTLR-generated recursive-descent rule for the INSERT_REALTIME statement:
//   INSERT_REALTIME INTO <id> ( selectItems, ... ) VALUES ( value, ... )
//     [ WHERE intervalClause ] [ BREAK BY '<granularity>' ]
// Generated code — do not hand-edit; regenerate from druidG.g instead.
// The altNN/LANN locals are ANTLR's lookahead decisions for optional WS and
// the repeated ", item" sub-rules; token 91 is the ',' literal.
// NOTE(review): at the end of the rule, iMeta.granularitySpec.interval is
// assigned whenever a WHERE interval was parsed, but granularitySpec is only
// created by the optional BREAK BY clause — a WHERE without BREAK BY looks
// like a potential NullPointerException. Confirm whether the grammar (or a
// later pass) guarantees granularitySpec is non-null in that case.
RTInsertMeta iMeta = null ; Token id = null ; Token gran = null ; Object a = null ; List < Interval > i = null ; iMeta = new RTInsertMeta ( ) ; try { // druidG . g : 123:2 : ( ( INSERT _ REALTIME WS INTO WS ( id = ID ) ( WS ) ? LPARAN ( WS ) ? selectItems [ iMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ iMeta ] ) * ( WS ) ? RPARAN ( WS ) ? ) VALUES ( WS ) ? LPARAN ( WS ) ? ( a = anyValue ) ( ( WS ) ? ' , ' ( WS ) ? a = anyValue ) * ( WS ) ? RPARAN ( WS ) ? ( WHERE WS i = intervalClause ) ? ( WS BREAK WS BY WS gran = SINGLE _ QUOTE _ STRING ) ? ) // druidG . g : 123:3 : ( INSERT _ REALTIME WS INTO WS ( id = ID ) ( WS ) ? LPARAN ( WS ) ? selectItems [ iMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ iMeta ] ) * ( WS ) ? RPARAN ( WS ) ? ) VALUES ( WS ) ? LPARAN ( WS ) ? ( a = anyValue ) ( ( WS ) ? ' , ' ( WS ) ? a = anyValue ) * ( WS ) ? RPARAN ( WS ) ? ( WHERE WS i = intervalClause ) ? ( WS BREAK WS BY WS gran = SINGLE _ QUOTE _ STRING ) ? { // druidG . g : 123:3 : ( INSERT _ REALTIME WS INTO WS ( id = ID ) ( WS ) ? LPARAN ( WS ) ? selectItems [ iMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ iMeta ] ) * ( WS ) ? RPARAN ( WS ) ? ) // druidG . g : 123:4 : INSERT _ REALTIME WS INTO WS ( id = ID ) ( WS ) ? LPARAN ( WS ) ? selectItems [ iMeta ] ( ( WS ) ? ' , ' ( WS ) ? selectItems [ iMeta ] ) * ( WS ) ? RPARAN ( WS ) ? { match ( input , INSERT_REALTIME , FOLLOW_INSERT_REALTIME_in_insertRTStmnt875 ) ; match ( input , WS , FOLLOW_WS_in_insertRTStmnt877 ) ; match ( input , INTO , FOLLOW_INTO_in_insertRTStmnt879 ) ; match ( input , WS , FOLLOW_WS_in_insertRTStmnt881 ) ; // druidG . g : 123:31 : ( id = ID ) // druidG . g : 123:32 : id = ID { id = ( Token ) match ( input , ID , FOLLOW_ID_in_insertRTStmnt886 ) ; iMeta . dataSource = ( id != null ? id . getText ( ) : null ) ; } // druidG . g : 123:71 : ( WS ) ? int alt69 = 2 ; int LA69_0 = input . LA ( 1 ) ; if ( ( LA69_0 == WS ) ) { alt69 = 1 ; } switch ( alt69 ) { case 1 : // druidG . 
g : 123:71 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt891 ) ; } break ; } match ( input , LPARAN , FOLLOW_LPARAN_in_insertRTStmnt894 ) ; // druidG . g : 123:82 : ( WS ) ? int alt70 = 2 ; int LA70_0 = input . LA ( 1 ) ; if ( ( LA70_0 == WS ) ) { alt70 = 1 ; } switch ( alt70 ) { case 1 : // druidG . g : 123:82 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt896 ) ; } break ; } pushFollow ( FOLLOW_selectItems_in_insertRTStmnt899 ) ; selectItems ( iMeta ) ; state . _fsp -- ; // druidG . g : 123:105 : ( ( WS ) ? ' , ' ( WS ) ? selectItems [ iMeta ] ) * loop73 : while ( true ) { int alt73 = 2 ; int LA73_0 = input . LA ( 1 ) ; if ( ( LA73_0 == WS ) ) { int LA73_1 = input . LA ( 2 ) ; if ( ( LA73_1 == 91 ) ) { alt73 = 1 ; } } else if ( ( LA73_0 == 91 ) ) { alt73 = 1 ; } switch ( alt73 ) { case 1 : // druidG . g : 123:106 : ( WS ) ? ' , ' ( WS ) ? selectItems [ iMeta ] { // druidG . g : 123:106 : ( WS ) ? int alt71 = 2 ; int LA71_0 = input . LA ( 1 ) ; if ( ( LA71_0 == WS ) ) { alt71 = 1 ; } switch ( alt71 ) { case 1 : // druidG . g : 123:106 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt903 ) ; } break ; } match ( input , 91 , FOLLOW_91_in_insertRTStmnt906 ) ; // druidG . g : 123:114 : ( WS ) ? int alt72 = 2 ; int LA72_0 = input . LA ( 1 ) ; if ( ( LA72_0 == WS ) ) { alt72 = 1 ; } switch ( alt72 ) { case 1 : // druidG . g : 123:114 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt908 ) ; } break ; } pushFollow ( FOLLOW_selectItems_in_insertRTStmnt911 ) ; selectItems ( iMeta ) ; state . _fsp -- ; } break ; default : break loop73 ; } } // druidG . g : 123:139 : ( WS ) ? int alt74 = 2 ; int LA74_0 = input . LA ( 1 ) ; if ( ( LA74_0 == WS ) ) { alt74 = 1 ; } switch ( alt74 ) { case 1 : // druidG . g : 123:139 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt916 ) ; } break ; } match ( input , RPARAN , FOLLOW_RPARAN_in_insertRTStmnt919 ) ; // druidG . g : 123:150 : ( WS ) ? int alt75 = 2 ; int LA75_0 = input . 
LA ( 1 ) ; if ( ( LA75_0 == WS ) ) { alt75 = 1 ; } switch ( alt75 ) { case 1 : // druidG . g : 123:150 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt921 ) ; } break ; } } match ( input , VALUES , FOLLOW_VALUES_in_insertRTStmnt927 ) ; // druidG . g : 124:10 : ( WS ) ? int alt76 = 2 ; int LA76_0 = input . LA ( 1 ) ; if ( ( LA76_0 == WS ) ) { alt76 = 1 ; } switch ( alt76 ) { case 1 : // druidG . g : 124:10 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt929 ) ; } break ; } match ( input , LPARAN , FOLLOW_LPARAN_in_insertRTStmnt932 ) ; // druidG . g : 124:21 : ( WS ) ? int alt77 = 2 ; int LA77_0 = input . LA ( 1 ) ; if ( ( LA77_0 == WS ) ) { alt77 = 1 ; } switch ( alt77 ) { case 1 : // druidG . g : 124:21 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt934 ) ; } break ; } // druidG . g : 124:25 : ( a = anyValue ) // druidG . g : 124:26 : a = anyValue { pushFollow ( FOLLOW_anyValue_in_insertRTStmnt940 ) ; a = anyValue ( ) ; state . _fsp -- ; iMeta . values . add ( a ) ; } // druidG . g : 124:62 : ( ( WS ) ? ' , ' ( WS ) ? a = anyValue ) * loop80 : while ( true ) { int alt80 = 2 ; int LA80_0 = input . LA ( 1 ) ; if ( ( LA80_0 == WS ) ) { int LA80_1 = input . LA ( 2 ) ; if ( ( LA80_1 == 91 ) ) { alt80 = 1 ; } } else if ( ( LA80_0 == 91 ) ) { alt80 = 1 ; } switch ( alt80 ) { case 1 : // druidG . g : 124:63 : ( WS ) ? ' , ' ( WS ) ? a = anyValue { // druidG . g : 124:63 : ( WS ) ? int alt78 = 2 ; int LA78_0 = input . LA ( 1 ) ; if ( ( LA78_0 == WS ) ) { alt78 = 1 ; } switch ( alt78 ) { case 1 : // druidG . g : 124:63 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt947 ) ; } break ; } match ( input , 91 , FOLLOW_91_in_insertRTStmnt950 ) ; // druidG . g : 124:71 : ( WS ) ? int alt79 = 2 ; int LA79_0 = input . LA ( 1 ) ; if ( ( LA79_0 == WS ) ) { alt79 = 1 ; } switch ( alt79 ) { case 1 : // druidG . 
g : 124:71 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt952 ) ; } break ; } pushFollow ( FOLLOW_anyValue_in_insertRTStmnt957 ) ; a = anyValue ( ) ; state . _fsp -- ; iMeta . values . add ( a ) ; } break ; default : break loop80 ; } } // druidG . g : 124:111 : ( WS ) ? int alt81 = 2 ; int LA81_0 = input . LA ( 1 ) ; if ( ( LA81_0 == WS ) ) { alt81 = 1 ; } switch ( alt81 ) { case 1 : // druidG . g : 124:111 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt963 ) ; } break ; } match ( input , RPARAN , FOLLOW_RPARAN_in_insertRTStmnt966 ) ; // druidG . g : 124:122 : ( WS ) ? int alt82 = 2 ; int LA82_0 = input . LA ( 1 ) ; if ( ( LA82_0 == WS ) ) { alt82 = 1 ; } switch ( alt82 ) { case 1 : // druidG . g : 124:122 : WS { match ( input , WS , FOLLOW_WS_in_insertRTStmnt968 ) ; } break ; } // druidG . g : 125:3 : ( WHERE WS i = intervalClause ) ? int alt83 = 2 ; int LA83_0 = input . LA ( 1 ) ; if ( ( LA83_0 == WHERE ) ) { alt83 = 1 ; } switch ( alt83 ) { case 1 : // druidG . g : 125:4 : WHERE WS i = intervalClause { match ( input , WHERE , FOLLOW_WHERE_in_insertRTStmnt974 ) ; match ( input , WS , FOLLOW_WS_in_insertRTStmnt976 ) ; pushFollow ( FOLLOW_intervalClause_in_insertRTStmnt980 ) ; i = intervalClause ( ) ; state . _fsp -- ; } break ; } // druidG . g : 126:4 : ( WS BREAK WS BY WS gran = SINGLE _ QUOTE _ STRING ) ? int alt84 = 2 ; int LA84_0 = input . LA ( 1 ) ; if ( ( LA84_0 == WS ) ) { int LA84_1 = input . LA ( 2 ) ; if ( ( LA84_1 == BREAK ) ) { alt84 = 1 ; } } switch ( alt84 ) { case 1 : // druidG . g : 126:5 : WS BREAK WS BY WS gran = SINGLE _ QUOTE _ STRING { match ( input , WS , FOLLOW_WS_in_insertRTStmnt988 ) ; match ( input , BREAK , FOLLOW_BREAK_in_insertRTStmnt990 ) ; match ( input , WS , FOLLOW_WS_in_insertRTStmnt992 ) ; match ( input , BY , FOLLOW_BY_in_insertRTStmnt994 ) ; match ( input , WS , FOLLOW_WS_in_insertRTStmnt996 ) ; gran = ( Token ) match ( input , SINGLE_QUOTE_STRING , FOLLOW_SINGLE_QUOTE_STRING_in_insertRTStmnt1000 ) ; iMeta . 
granularitySpec = new GranularitySpec ( unquote ( ( gran != null ? gran . getText ( ) : null ) ) ) ; } break ; } // We set this later after granularitySpec object is fully formed . if ( i != null && ! i . isEmpty ( ) ) { iMeta . granularitySpec . interval = i . get ( 0 ) ; // We already checked for list ' s emptiness ( it is safe to access get ( 0 ) . } } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving } return iMeta ;
public class ListOfInterpreter {
    /**
     * {@inheritDoc}
     *
     * Walks the example table and the fixture list in lock-step: matched
     * (row, fixture) pairs are processed, leftover rows are flagged as
     * missing, and leftover fixtures are rendered as surplus rows. Any
     * exception is annotated on the table and counted in {@code stats}.
     */
    public void execute(Example example) {
        try {
            List<Fixture> fixtures = getFixtureList();
            Example headers = example.at(0, 0);
            Iterator<Fixture> it = fixtures.iterator();
            Example row;
            // Phase 1: process each table row against the next fixture while
            // both remain and execution may continue.
            for (row = example.nextSibling(); row != null && it.hasNext() && canContinue(stats); row = row.nextSibling()) {
                processRow(row.firstChild(), headers, it.next());
                if (shouldStop(stats)) {
                    row.addChild().annotate(Annotations.stopped());
                }
            }
            // Phase 2: table rows left over after fixtures ran out are missing.
            while (row != null && canContinue(stats)) {
                missingRow(row);
                if (shouldStop(stats)) {
                    row.addChild().annotate(Annotations.stopped());
                }
                row = row.nextSibling();
            }
            // Phase 3: fixtures left over after rows ran out become surplus rows.
            while (it.hasNext() && canContinue(stats)) {
                Fixture adapter = it.next();
                addSurplusRow(example, headers, adapter);
                if (shouldStop(stats)) {
                    example.lastSibling().addChild().annotate(Annotations.stopped());
                    break;
                }
            }
        } catch (Exception e) {
            // Record the failure on the table header and in the statistics.
            example.firstChild().annotate(exception(e));
            stats.exception();
            if (shouldStop(stats)) {
                example.addChild().annotate(Annotations.stopped());
            }
        }
    }
}
public class CollectionLiteralsTypeComputer { /** * If the expected type is a subtype of { @ link Map } , the resolved super type is returned . * This allows to query for the type arguments that are available on the expectation . */ protected LightweightTypeReference getMapExpectation ( LightweightTypeReference expectation ) { } }
if ( expectation != null && expectation . isResolved ( ) ) { LightweightTypeReference result = expectation . getSuperType ( Map . class ) ; if ( result != null && result . getTypeArguments ( ) . size ( ) == 2 ) { return result ; } } return null ;
public class Frame { /** * Returns an { @ link Indexer } for the < i > i < / i > th image plane . */ public < I extends Indexer > I createIndexer ( boolean direct , int i ) { } }
// Dispatches on imageDepth to wrap the i-th image plane Buffer in the
// matching typed Indexer, using sizes {imageHeight, imageWidth,
// imageChannels} and strides {imageStride, imageChannels, 1}.
// For every depth the selection order is the same:
//   1. if the Buffer is backed by a heap array, index that array directly;
//   2. else, if `direct` is requested, index the NIO buffer itself;
//   3. else, wrap the buffer in a Pointer and index through it.
// Each created indexer is tied back to this Frame via indexable(this).
// NOTE(review): an unrecognized imageDepth only trips `assert false` (a
// no-op unless assertions are enabled) and then falls through to return
// null — callers must tolerate a null result; confirm this is intended.
long [ ] sizes = { imageHeight , imageWidth , imageChannels } ; long [ ] strides = { imageStride , imageChannels , 1 } ; Buffer buffer = image [ i ] ; Object array = buffer . hasArray ( ) ? buffer . array ( ) : null ; switch ( imageDepth ) { case DEPTH_UBYTE : return array != null ? ( I ) UByteIndexer . create ( ( byte [ ] ) array , sizes , strides ) . indexable ( this ) : direct ? ( I ) UByteIndexer . create ( ( ByteBuffer ) buffer , sizes , strides ) . indexable ( this ) : ( I ) UByteIndexer . create ( new BytePointer ( ( ByteBuffer ) buffer ) , sizes , strides , false ) . indexable ( this ) ; case DEPTH_BYTE : return array != null ? ( I ) ByteIndexer . create ( ( byte [ ] ) array , sizes , strides ) . indexable ( this ) : direct ? ( I ) ByteIndexer . create ( ( ByteBuffer ) buffer , sizes , strides ) . indexable ( this ) : ( I ) ByteIndexer . create ( new BytePointer ( ( ByteBuffer ) buffer ) , sizes , strides , false ) . indexable ( this ) ; case DEPTH_USHORT : return array != null ? ( I ) UShortIndexer . create ( ( short [ ] ) array , sizes , strides ) . indexable ( this ) : direct ? ( I ) UShortIndexer . create ( ( ShortBuffer ) buffer , sizes , strides ) . indexable ( this ) : ( I ) UShortIndexer . create ( new ShortPointer ( ( ShortBuffer ) buffer ) , sizes , strides , false ) . indexable ( this ) ; case DEPTH_SHORT : return array != null ? ( I ) ShortIndexer . create ( ( short [ ] ) array , sizes , strides ) . indexable ( this ) : direct ? ( I ) ShortIndexer . create ( ( ShortBuffer ) buffer , sizes , strides ) . indexable ( this ) : ( I ) ShortIndexer . create ( new ShortPointer ( ( ShortBuffer ) buffer ) , sizes , strides , false ) . indexable ( this ) ; case DEPTH_INT : return array != null ? ( I ) IntIndexer . create ( ( int [ ] ) array , sizes , strides ) . indexable ( this ) : direct ? ( I ) IntIndexer . create ( ( IntBuffer ) buffer , sizes , strides ) . indexable ( this ) : ( I ) IntIndexer . 
create ( new IntPointer ( ( IntBuffer ) buffer ) , sizes , strides , false ) . indexable ( this ) ; case DEPTH_LONG : return array != null ? ( I ) LongIndexer . create ( ( long [ ] ) array , sizes , strides ) . indexable ( this ) : direct ? ( I ) LongIndexer . create ( ( LongBuffer ) buffer , sizes , strides ) . indexable ( this ) : ( I ) LongIndexer . create ( new LongPointer ( ( LongBuffer ) buffer ) , sizes , strides , false ) . indexable ( this ) ; case DEPTH_FLOAT : return array != null ? ( I ) FloatIndexer . create ( ( float [ ] ) array , sizes , strides ) . indexable ( this ) : direct ? ( I ) FloatIndexer . create ( ( FloatBuffer ) buffer , sizes , strides ) . indexable ( this ) : ( I ) FloatIndexer . create ( new FloatPointer ( ( FloatBuffer ) buffer ) , sizes , strides , false ) . indexable ( this ) ; case DEPTH_DOUBLE : return array != null ? ( I ) DoubleIndexer . create ( ( double [ ] ) array , sizes , strides ) . indexable ( this ) : direct ? ( I ) DoubleIndexer . create ( ( DoubleBuffer ) buffer , sizes , strides ) . indexable ( this ) : ( I ) DoubleIndexer . create ( new DoublePointer ( ( DoubleBuffer ) buffer ) , sizes , strides , false ) . indexable ( this ) ; default : assert false ; } return null ;
public class Neighbours {

    /**
     * Removes a Neighbouring ME.
     *
     * When deleting an ME, all proxy subscriptions that were registered need
     * to be removed.
     *
     * @param meUUID      The uuid of the Neighbouring ME.
     * @param busId       The bus that this ME belongs to.
     * @param transaction The transaction in which to remove the Neighbour.
     * @exception SIDestinationNotFoundException Thrown if the Neighbour isn't known to this ME
     * @exception SIStoreException Thrown if the Neighbour can't be removed from the item stream.
     */
    protected void removeNeighbour(SIBUuid8 meUUID, String busId, Transaction transaction)
        throws SIConnectionLostException, SIResourceException, SIErrorException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "removeNeighbour", new Object[] { busId, transaction });

        // Look the Neighbour up in the live map first, then fall back to the
        // recovered map; remember which map it came from so removal below
        // targets the same map.
        boolean recoveredNeighbour = false;
        Neighbour neighbour = null;
        synchronized (_neighbours) {
            neighbour = (Neighbour) _neighbours.get(meUUID);
        }
        if (neighbour == null) {
            recoveredNeighbour = true;
            synchronized (_recoveredNeighbours) {
                neighbour = (Neighbour) _recoveredNeighbours.get(meUUID);
            }
        }

        // Does this Neighbour exist already.
        if (neighbour != null) {
            // Get the bus that the Neighbour belonged to.
            final BusGroup group = neighbour.getBus();
            if (group != null) {
                group.removeNeighbour(neighbour);
                // Last member gone: the bus grouping itself is no longer needed.
                if (group.getMembers().length == 0)
                    deleteBus(group);
            }

            // Remove all the proxies for this Neighbour.
            // A recovered Neighbour's proxies are removed non-transactionally
            // (third argument false).
            removeRegisteredProxies(neighbour, transaction, !recoveredNeighbour);

            // Remove Neighbour from the item stream.
            try {
                neighbour.remove(transaction, neighbour.getLockID());
            } catch (MessageStoreException e) {
                // If the MessageStoreException is caught, then this is bad, log a FFDC
                FFDCFilter.processException(
                    e,
                    "com.ibm.ws.sib.processor.proxyhandler.Neighbours.removeRegisteredProxies",
                    "1:879:1.113",
                    this);
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                    SibTr.exit(tc, "removeRegisteredProxies", "SIResourceException");
                SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0003",
                    new Object[] { "com.ibm.ws.sib.processor.proxyhandler.Neighbours",
                        "1:888:1.113", e, neighbour.getUUID() });
                throw new SIResourceException(
                    nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0003",
                        new Object[] { "com.ibm.ws.sib.processor.proxyhandler.Neighbours",
                            "1:897:1.113", e, neighbour.getUUID() },
                        null),
                    e);
            }

            // Remove the given neighbour object from the list of Neighbours
            // (the same map it was found in above).
            if (!recoveredNeighbour) {
                synchronized (_neighbours) {
                    _neighbours.remove(meUUID);
                }
            } else {
                synchronized (_recoveredNeighbours) {
                    _recoveredNeighbours.remove(meUUID);
                }
            }

            // Ask the Neighbour to delete its destination.
            neighbour.deleteDestination();
        } else {
            // Unknown Neighbour: trace, log and surface as an internal error.
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "removeNeighbour", "Neighbour Unknown");
            SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0005",
                new Object[] { "com.ibm.ws.sib.processor.proxyhandler.Neighbours",
                    "1:932:1.113", meUUID });
            throw new SIErrorException(
                nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0005",
                    new Object[] { "com.ibm.ws.sib.processor.proxyhandler.Neighbours",
                        "1:940:1.113", meUUID },
                    null));
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "removeNeighbour");
    }
}
public class AsyncHttpClientRestClient { /** * Delegate to { @ link AsyncHttpClientRestClient # doGet ( String ) } and also use { @ link Gson } to parse the returned json * into the requested object * @ param url url to call * @ param returnType taskType to parse the returned json into * @ param < T > the return taskType , should match returnType * @ return the KanbaneryResource created by parsing the retrieved json * @ throws ServerCommunicationException if the response body could not be fetched */ @ Override @ SuppressWarnings ( "unchecked" ) public < T > T doGet ( String url , Type returnType ) throws ServerCommunicationException { } }
RestClientResponse response = doGet ( url ) ; String responseBody = response . getResponseBody ( ) ; return ( T ) gson . fromJson ( responseBody , returnType ) ;
public class PossibleIncompleteSerialization { /** * implements the visitor to look for classes that are serializable , and are derived from non serializable classes and don ' t either implement methods in * Externalizable or Serializable to save parent class fields . * @ param classContext * the context object of the currently parsed class */ @ Override public void visitClassContext ( ClassContext classContext ) { } }
try { JavaClass cls = classContext . getJavaClass ( ) ; if ( isSerializable ( cls ) ) { JavaClass superCls = cls . getSuperClass ( ) ; if ( ! isSerializable ( superCls ) && hasSerializableFields ( superCls ) && ! hasSerializingMethods ( cls ) ) { bugReporter . reportBug ( new BugInstance ( this , BugType . PIS_POSSIBLE_INCOMPLETE_SERIALIZATION . name ( ) , NORMAL_PRIORITY ) . addClass ( cls ) ) ; } } } catch ( ClassNotFoundException cnfe ) { bugReporter . reportMissingClass ( cnfe ) ; }
public class HadoopInputSplit { private void writeObject ( ObjectOutputStream out ) throws IOException { } }
// serialize the parent fields and the final fields out . defaultWriteObject ( ) ; // write the input split ( ( Writable ) mapreduceInputSplit ) . write ( out ) ;
public class Logging { /** * Split type : : name into type and name - returns type */ private String get_target_type ( String ttype_tname ) { } }
String [ ] split ; try { split = ttype_tname . split ( LOGGING_SEPARATOR ) ; } catch ( Exception e ) { return "unknown" ; } return split [ 0 ] ;
public class SubReportBuilder { /** * defines the KEY in the parent report parameters map where to get the subreport parameters map . * @ param path where to get the parameter map for the subrerpot . * @ return */ public SubReportBuilder setParameterMapPath ( String path ) { } }
subreport . setParametersExpression ( path ) ; subreport . setParametersMapOrigin ( DJConstants . SUBREPORT_PARAMETER_MAP_ORIGIN_PARAMETER ) ; return this ;
public class WebSocketNodeService {

    /**
     * Forcibly closes the user's WebSocket on this node.
     *
     * @param userid        Serializable user id
     * @param targetAddress InetSocketAddress of the node to act on
     * @return future holding the number of connections closed
     */
    @Override
    public CompletableFuture<Integer> forceCloseWebSocket(Serializable userid, @RpcTargetAddress InetSocketAddress targetAddress) {
        // Must NOT remove the entry from sncpNodeAddresses here, because
        // engine.forceCloseWebSocket eventually calls disconnect itself.
        if (logger.isLoggable(Level.FINEST)) {
            logger.finest(WebSocketNodeService.class.getSimpleName() + ".event: " + userid + " forceCloseWebSocket from " + targetAddress);
        }
        if (localEngine == null) {
            return CompletableFuture.completedFuture(0);
        }
        return CompletableFuture.completedFuture(localEngine.forceCloseLocalWebSocket(userid));
    }
}
public class TaskNodeStatusImpl {

    /**
     * Records that an ACK for {@code msgType} is expected from {@code srcId}.
     *
     * This needs to happen in line rather than in a stage because we need to note
     * the messages we send to the tasks before we start processing msgs from the
     * nodes (Acks and Topology msgs).
     */
    @Override
    public void expectAckFor(final Type msgType, final String srcId) {
        LOG.entering("TaskNodeStatusImpl", "expectAckFor", new Object[] { getQualifiedName(), msgType, srcId });
        LOG.finest(getQualifiedName() + "Adding " + srcId + " to sources");
        // Track the pending ACK source under its message type.
        statusMap.add(msgType, srcId);
        LOG.exiting("TaskNodeStatusImpl", "expectAckFor", getQualifiedName() + "Sources from which ACKs for " + msgType + " are expected: " + statusMap.get(msgType));
    }
}
public class HtmlReporter { /** * Construct HTML code snippet for stack trace information . * @ param cause the causing error . * @ return */ private String getStackTraceHtml ( Throwable cause ) { } }
StringBuilder stackTraceBuilder = new StringBuilder ( ) ; stackTraceBuilder . append ( cause . getClass ( ) . getName ( ) ) . append ( ": " ) . append ( cause . getMessage ( ) ) . append ( "\n " ) ; for ( int i = 0 ; i < cause . getStackTrace ( ) . length ; i ++ ) { stackTraceBuilder . append ( "\n\t at " ) ; stackTraceBuilder . append ( cause . getStackTrace ( ) [ i ] ) ; } return "<tr><td colspan=\"2\">" + "<div class=\"error-detail\"><pre>" + stackTraceBuilder . toString ( ) + "</pre>" + getCodeSnippetHtml ( cause ) + "</div></td></tr>" ;
public class Package {

    /**
     * Add a {@link Tag} to this {@link Package}.
     *
     * @param tag the tag to append to the package's existing tags
     */
    public void addTag(Tag tag) {
        // Rebuild the tag list as (existing tags + new tag) and write it back under
        // the TAGS metadata key; the existing list is not mutated in place.
        set(PackageMetadata.TAGS, concat(getTags(), singletonList(tag)));
    }
}
public class AdaptiveTableLayout { /** * Remove item view holders from base collection * @ param toRemove Collection with view holders which need to remove */ private void removeViewHolders ( @ Nullable Collection < ViewHolder > toRemove ) { } }
if ( toRemove != null ) { for ( ViewHolder holder : toRemove ) { mViewHolders . remove ( holder . getRowIndex ( ) , holder . getColumnIndex ( ) ) ; } }
public class KeyboardManager {

    /**
     * Notifies all registered key observers of the supplied key event. This method
     * provides a thread-safe manner in which to notify the observers, which is
     * necessary since the {@link KeyInfo} objects do various antics from the interval
     * manager thread whilst we may do other notification from the AWT thread when
     * normal key events are handled.
     */
    protected synchronized void notifyObservers(int id, int keyCode, long timestamp) {
        // Reuse the single op instance to avoid per-event allocation; safe only
        // because this method is synchronized.
        _keyOp.init(id, keyCode, timestamp);
        _observers.apply(_keyOp);
    }
}
public class ContextUtils {

    /**
     * Copy an assets file to a destination path.
     *
     * @param context the Android context providing assets and the files dir
     * @param srcFile source file in assets
     * @param dstFile destination file, full path with folder; if only a bare file
     *                name is given, the file is written to the context's files folder
     * @return {@link #COPY_SUCCESS} on success, {@link #COPY_DST_EXIST} when a
     *         same-or-larger destination already exists, {@link #COPY_ERROR} on I/O failure
     */
    public static int CopyAssets(Context context, String srcFile, String dstFile) {
        int result = COPY_SUCCESS;
        InputStream in = null;
        OutputStream out = null;
        try {
            File fi;
            // A bare file name (no directory component) goes into the app's files dir.
            if (dstFile.equals(FileUtils.getFileName(dstFile))) {
                fi = new File(context.getFilesDir(), dstFile);
            } else {
                fi = new File(dstFile);
            }
            in = context.getAssets().open(srcFile, AssetManager.ACCESS_STREAMING);
            // Skip the copy when the destination already holds at least as many bytes.
            // NOTE(review): InputStream.available() is only an estimate of the asset
            // size -- confirm this heuristic is acceptable here.
            if (fi.exists() && in.available() <= fi.length()) {
                result = COPY_DST_EXIST;
            } else {
                out = new FileOutputStream(fi);
                byte[] buffer = new byte[10240];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    out.write(buffer, 0, read);
                }
                buffer = null;
            }
        } catch (IOException e) {
            Log.e(TAG, "", e);
            result = COPY_ERROR;
        } finally {
            // Best-effort close of both streams; close failures are only printed.
            if (in != null)
                try {
                    in.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            in = null;
            if (out != null) {
                try {
                    out.flush();
                    out.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            out = null;
        }
        return result;
    }
}
public class PlayCollectContext { /** * A legal digit map as described in < a href = " https : / / tools . ietf . org / html / rfc2885 # section - 7.1.14 " > section 7.1.14 < / a > of the * MEGACO protocol using the DTMF mappings associated with the Megaco DTMF Detection Package described in the Megaco * protocol document . * < b > This parameter should not be specified if one or both of the Minimum # Of Digits parameter and the Maximum Number Of * Digits parameter is present . < / b > * @ return The digit pattern or an empty String if not specified . */ public String getDigitPattern ( ) { } }
String pattern = Optional . fromNullable ( getParameter ( SignalParameters . DIGIT_PATTERN . symbol ( ) ) ) . or ( "" ) ; if ( ! pattern . isEmpty ( ) ) { // Replace pattern to comply with MEGACO digitMap pattern = pattern . replace ( "." , "*" ) . replace ( "x" , "\\d" ) ; } return pattern ;
public class ProducerSequenceFactory { /** * bitmap cache get - > * background thread hand - off - > multiplex - > bitmap cache - > decode - > * branch on separate images * - > exif resize and rotate - > exif thumbnail creation * - > local image resize and rotate - > add meta data producer - > multiplex - > encoded cache - > * ( webp transcode ) - > local file fetch . */ private synchronized Producer < CloseableReference < CloseableImage > > getLocalImageFileFetchSequence ( ) { } }
if ( mLocalImageFileFetchSequence == null ) { LocalFileFetchProducer localFileFetchProducer = mProducerFactory . newLocalFileFetchProducer ( ) ; mLocalImageFileFetchSequence = newBitmapCacheGetToLocalTransformSequence ( localFileFetchProducer ) ; } return mLocalImageFileFetchSequence ;
public class TimedCache { /** * 定时清理 * @ param delay 间隔时长 , 单位毫秒 */ public void schedulePrune ( long delay ) { } }
this . pruneJobFuture = GlobalPruneTimer . INSTANCE . schedule ( new Runnable ( ) { @ Override public void run ( ) { prune ( ) ; } } , delay ) ;
public class ListVolumeInitiatorsResult { /** * The host names and port numbers of all iSCSI initiators that are connected to the gateway . * @ param initiators * The host names and port numbers of all iSCSI initiators that are connected to the gateway . */ public void setInitiators ( java . util . Collection < String > initiators ) { } }
if ( initiators == null ) { this . initiators = null ; return ; } this . initiators = new com . amazonaws . internal . SdkInternalList < String > ( initiators ) ;
public class CmsXmlContentDefinition { /** * Calculates the schema type for the given element name by recursing into the schema structure . < p > * @ param elementPath the element xpath to look up the type for * @ return the type for the given element name , or < code > null < / code > if no * node is defined with this name */ private I_CmsXmlSchemaType getSchemaTypeRecusive ( String elementPath ) { } }
String path = CmsXmlUtils . getFirstXpathElement ( elementPath ) ; I_CmsXmlSchemaType type = m_types . get ( path ) ; if ( type == null ) { // no node with the given path defined in schema return null ; } // check if recursion is required to get value from a nested schema if ( type . isSimpleType ( ) || ! CmsXmlUtils . isDeepXpath ( elementPath ) ) { // no recursion required return type ; } // recursion required since the path is an xpath and the type must be a nested content definition CmsXmlNestedContentDefinition nestedDefinition = ( CmsXmlNestedContentDefinition ) type ; path = CmsXmlUtils . removeFirstXpathElement ( elementPath ) ; return nestedDefinition . getNestedContentDefinition ( ) . getSchemaType ( path ) ;
public class ClassicLockView { /** * Displays a progress bar on top of the action button . This will also * enable or disable the action button . * @ param show whether to show or hide the action bar . */ public void showProgress ( boolean show ) { } }
if ( actionButton != null ) { actionButton . showProgress ( show ) ; } if ( formLayout != null ) { formLayout . setEnabled ( ! show ) ; }
public class NoProxyPortSelector {

    /**
     * Retrieve (and create, if necessary) the singleton instance.
     *
     * @return the NoProxyPortSelector singleton
     */
    public static NoProxyPortSelector getInstance() {
        // Double-checked locking: the unsynchronized first read skips the monitor
        // once the instance exists; the second read inside the lock prevents two
        // racing threads from both constructing.
        // NOTE(review): this idiom is only safe if _instance is declared volatile
        // (field declaration not visible here) -- confirm, otherwise a thread may
        // observe a partially-constructed instance.
        if (_instance == null) {
            synchronized (NoProxyPortSelector.class) {
                if (_instance == null) {
                    _instance = new NoProxyPortSelector();
                }
            }
        }
        return _instance;
    }
}
public class Log4jUtils { /** * ( Re ) Loads the config file using { @ link DOMConfigurator } for { @ code . xml } files and { @ link PropertyConfigurator } for { @ code . proeprties } files . * Reloads the file automatically when changed . * @ param configFile log4j configuration file , { @ code . properties } or { @ code . xml } * @ param delay number of milliseconds between checks of changes in the config file , defaults to { @ link Log4jUtils # DEFAULT _ WATCH _ DELAY } if set to 0 */ public static void reloadAndWatch ( final String configFile , final long delay ) { } }
long watchDelay = delay == 0 ? Log4jUtils . DEFAULT_WATCH_DELAY : delay ; if ( configFile != null ) { if ( configFile . endsWith ( XML_FILE_EXENSION ) ) { DOMConfigurator . configureAndWatch ( configFile , watchDelay ) ; } else PropertyConfigurator . configureAndWatch ( configFile , watchDelay ) ; } if ( logger . isInfoEnabled ( ) ) { logger . info ( "Reloads and watches log4j configuration: " + configFile ) ; }
public class Labeling { /** * labeling observation sequences . * @ param data list of sequences with specified format which can be read by DataReader * @ return a list of sentences with tags annotated */ @ SuppressWarnings ( "unchecked" ) public List seqLabeling ( String data ) { } }
List < Sentence > obsvSeqs = dataReader . readString ( data ) ; return labeling ( obsvSeqs ) ;
public class AbstractCodeCreator { /** * adds requirements on one or more files that must be copied into the project , * this is only for files that already exist in tcMenu library , prefer to use the * other version that takes ` PluiginFileDependency ` * @ see PluginFileDependency * @ param files the list of library files . */ protected void addLibraryFiles ( String ... files ) { } }
libraryFiles . addAll ( Arrays . stream ( files ) . map ( PluginFileDependency :: fileInTcMenu ) . collect ( Collectors . toList ( ) ) ) ;
public class NoCacheDatabase { /** * Export a tango device into the tango db ( execute DbExportDevice on DB * device ) * @ param info * export info { @ link DeviceExportInfo } * @ throws DevFailed */ @ Override public void exportDevice ( final DeviceExportInfo info ) throws DevFailed { } }
final String [ ] array = info . toStringArray ( ) ; final DeviceData argin = new DeviceData ( ) ; argin . insert ( array ) ; database . command_inout ( "DbExportDevice" , argin ) ;
public class NumberUtil { /** * 将10进制的String安全的转化为Integer . * 当str为空或非数字字符串时 , 返回default值 */ public static Integer toIntObject ( @ Nullable String str , Integer defaultValue ) { } }
if ( StringUtils . isEmpty ( str ) ) { return defaultValue ; } try { return Integer . valueOf ( str ) ; } catch ( final NumberFormatException nfe ) { return defaultValue ; }
public class XBELConverter { /** * { @ inheritDoc } */ @ Override public XBELDocument unmarshal ( File f ) throws JAXBException , IOException { } }
final Unmarshaller unmarshaller = createNewUnmashaller ( ) ; FileInputStream fis = new FileInputStream ( f ) ; Reader reader = new InputStreamReader ( fis , UTF_8 ) ; return ( XBELDocument ) unmarshaller . unmarshal ( reader ) ;
public class AtomicVariableWidthArray {

    /**
     * Sets the element at the given index, but only if the previous value was the
     * expected value.
     *
     * @param i      the index
     * @param expect the expected value
     * @param update the new value
     * @return true on success
     */
    public final boolean compareAndSet(int i, int expect, int update) {
        if (fullWidth) {
            // Each logical element occupies a full int: delegate directly.
            return array.compareAndSet(i, expect, update);
        }
        boolean success = false;
        // Several narrow elements are packed into one backing int: locate the
        // backing slot and the element's position within it.
        int index = getIndex(i);
        int subIndex = getSubIndex(i);
        while (!success) {
            int prev = array.get(index);
            if (unPack(prev, subIndex) != expect) {
                // Current value no longer matches the expectation: fail fast.
                return false;
            }
            // Re-pack the slot with the updated element and CAS the whole int;
            // retry if a concurrent writer changed a sibling element meanwhile.
            int next = pack(prev, update, subIndex);
            success = array.compareAndSet(index, prev, next);
        }
        return true;
    }
}
import java.util.ArrayList;
import java.util.List;

public class ListAddition {

    /**
     * Adds two lists element-wise, pairing entries by index.
     * If the lists differ in length, the result has the length of the shorter one.
     *
     * >>> list_addition([1, 2, 3], [4, 5, 6])
     * [5, 7, 9]
     * >>> list_addition([1, 2], [3, 4])
     * [4, 6]
     * >>> list_addition([10, 20], [50, 70])
     * [60, 90]
     */
    public static List<Integer> listAddition(List<Integer> list1, List<Integer> list2) {
        int pairs = Math.min(list1.size(), list2.size());
        List<Integer> sums = new ArrayList<>(pairs);
        for (int idx = 0; idx < pairs; idx++) {
            sums.add(list1.get(idx) + list2.get(idx));
        }
        return sums;
    }
}
public class Hosts { /** * Chooses one of the available { @ link InetAddress } based on the specified preference . * If the preferred address is not part of the available addresses it will be ignored . * @ param preferred * @ return */ private static InetAddress chooseAddress ( String preferred ) throws UnknownHostException { } }
Set < InetAddress > addresses = getAddresses ( ) ; if ( preferred != null && ! preferred . isEmpty ( ) ) { // Favor preferred address if exists try { InetAddress preferredAddress = InetAddress . getByName ( preferred ) ; if ( addresses != null && addresses . contains ( preferredAddress ) ) { LOG . info ( "preferred address is " + preferredAddress . getHostAddress ( ) + " for host " + preferredAddress . getHostName ( ) ) ; return preferredAddress ; } } catch ( UnknownHostException e ) { // noop } for ( InetAddress address : addresses ) { if ( preferred . equals ( address . getHostName ( ) ) ) { return address ; } } StringBuffer hostNameBuffer = new StringBuffer ( ) ; for ( InetAddress address : addresses ) { if ( hostNameBuffer . length ( ) > 0 ) { hostNameBuffer . append ( ", " ) ; } hostNameBuffer . append ( address . getHostName ( ) + "/" + address . getHostAddress ( ) ) ; } LOG . warn ( "Could not find network address for preferred '" + preferred + "' when the addresses were: " + hostNameBuffer ) ; } if ( addresses . contains ( InetAddress . getLocalHost ( ) ) ) { // Then if local host address is not bound to a loop - back interface , use it . return InetAddress . getLocalHost ( ) ; } else if ( addresses != null && ! addresses . isEmpty ( ) ) { // else return the first available addrress return addresses . toArray ( new InetAddress [ addresses . size ( ) ] ) [ 0 ] ; } else { // else we are forcedt to use the localhost address . return InetAddress . getLocalHost ( ) ; }
public class ParallelTaskManager { /** * Clean wait task queue . */ public synchronized void cleanWaitTaskQueue ( ) { } }
for ( ParallelTask task : waitQ ) { task . setState ( ParallelTaskState . COMPLETED_WITH_ERROR ) ; task . getTaskErrorMetas ( ) . add ( new TaskErrorMeta ( TaskErrorType . USER_CANCELED , "NA" ) ) ; logger . info ( "task {} removed from wait q. This task has been marked as USER CANCELED." , task . getTaskId ( ) ) ; } waitQ . clear ( ) ;
public class GrowQueue_I8 { /** * Returns the index of the first element with the specified ' value ' . return - 1 if it wasn ' t found * @ param value Value to search for * @ return index or - 1 if it ' s not in the list */ public int indexOf ( byte value ) { } }
for ( int i = 0 ; i < size ; i ++ ) { if ( data [ i ] == value ) return i ; } return - 1 ;
public class Util { /** * Rolls back a { @ link Connection } and logs exceptions without throwing . * @ param connection */ static void rollback ( Connection connection ) { } }
if ( connection != null ) try { connection . rollback ( ) ; log . debug ( "rolled back" ) ; } catch ( SQLException e ) { throw new SQLRuntimeException ( e ) ; }
public class CellConstraints {

    /**
     * Checks and verifies that the horizontal alignment is a horizontal one and the
     * vertical alignment is a vertical one.
     *
     * @param horizontalAlignment the horizontal alignment
     * @param verticalAlignment   the vertical alignment
     * @throws IllegalArgumentException if an alignment is invalid
     */
    private static void ensureValidOrientations(Alignment horizontalAlignment, Alignment verticalAlignment) {
        if (!horizontalAlignment.isHorizontal()) {
            throw new IllegalArgumentException("The horizontal alignment must be one of: left, center, right, fill, default.");
        }
        if (!verticalAlignment.isVertical()) {
            throw new IllegalArgumentException("The vertical alignment must be one of: top, center, bottom, fill, default.");
        }
    }
}
public class GVRAndroidResource {

    /**
     * Close the open stream, if one was opened before; afterwards the resource is
     * marked {@code StreamStates.CLOSED}.
     */
    public synchronized final void closeStream() {
        try {
            if ((stream != null) && (streamState == StreamStates.OPEN)) {
                stream.close();
                stream = null;
            }
            // Reached only when close() did not throw (or nothing was open).
            streamState = StreamStates.CLOSED;
        } catch (IOException e) {
            // NOTE(review): when close() throws, streamState remains OPEN even
            // though the stream may no longer be usable -- confirm this is intended.
            e.printStackTrace();
        }
    }
}
public class ReferenceDataSourceDescriptionMarshaller {

    /**
     * Marshall the given parameter object into the protocol representation.
     *
     * @param referenceDataSourceDescription the object to marshall; must not be null
     * @param protocolMarshaller             the target protocol marshaller
     * @throws SdkClientException on null input or any marshalling failure
     */
    public void marshall(ReferenceDataSourceDescription referenceDataSourceDescription, ProtocolMarshaller protocolMarshaller) {
        if (referenceDataSourceDescription == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Field-by-field marshalling against the pre-built binding descriptors.
            protocolMarshaller.marshall(referenceDataSourceDescription.getReferenceId(), REFERENCEID_BINDING);
            protocolMarshaller.marshall(referenceDataSourceDescription.getTableName(), TABLENAME_BINDING);
            protocolMarshaller.marshall(referenceDataSourceDescription.getS3ReferenceDataSourceDescription(), S3REFERENCEDATASOURCEDESCRIPTION_BINDING);
            protocolMarshaller.marshall(referenceDataSourceDescription.getReferenceSchema(), REFERENCESCHEMA_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CoreBiGramTableDictionary { /** * 热更新二元接续词典 < br > * 集群环境 ( 或其他IOAdapter ) 需要自行删除缓存文件 * @ return 是否成功 */ public static boolean reload ( ) { } }
String biGramDictionaryPath = HanLP . Config . BiGramDictionaryPath ; IOUtil . deleteFile ( biGramDictionaryPath + ".table" + Predefine . BIN_EXT ) ; return load ( biGramDictionaryPath ) ;
public class DSetImpl { /** * Determine whether this set is a proper superset of the set referenced by * < code > otherSet < / code > . * @ paramotherSetAnother set . * @ return True if this set is a proper superset of the set referenced by * < code > otherSet < / code > , otherwise false . */ public boolean properSupersetOf ( org . odmg . DSet otherSet ) { } }
return ( otherSet . size ( ) > 0 && otherSet . size ( ) < this . size ( ) && this . supersetOf ( otherSet ) ) ;
public class JcElement {

    /**
     * <div color='red' style="font-size:24px;color:red"><b><i><u>JCYPHER</u></i></b></div>
     * <div color='red' style="font-size:18px;color:red"><i>access the id of a node or relation, return a <b>JcNumber</b></i></div>
     * <br/>
     */
    public JcNumber id() {
        // Build an expression node for ID(<this element>); nothing is evaluated
        // here -- evaluation happens when the query is executed.
        JcNumber ret = new JcNumber(null, this, new FunctionInstance(FUNCTION.PropertyContainer.ID, 1));
        // Record the invocation so the query-DSL recorder can replay it.
        QueryRecorder.recordInvocationConditional(this, "id", ret);
        return ret;
    }
}
public class TypeUtil { /** * Creates a RegEx Pattern object , if given String is a valid regular expression . * @ param _ regExStr regex string to check * @ return Pattern - Object or null if given String is no valid RegEx */ public static Pattern createRegExPatternIfValid ( String _regExStr ) { } }
if ( StringUtil . isBlank ( _regExStr ) ) { return null ; } Pattern pattern ; try { pattern = Pattern . compile ( _regExStr ) ; } catch ( PatternSyntaxException _ex ) { return null ; } return pattern ;
public class Queue {

    /**
     * Gets a new QueueBuilder using this Queue as the parent.
     *
     * @param process_builder_class the ProcessBuilder subclass the new builder produces
     * @return a QueueBuilder parented on this queue
     */
    public <P extends ProcessBuilder> QueueBuilder<P> newQueueBuilder(Class<P> process_builder_class) {
        // Pure delegation; QueueFactory owns the construction logic.
        return QueueFactory.newQueueBuilder(this, process_builder_class);
    }
}
public class ConvertNumberHandler { /** * ( non - Javadoc ) * @ see org . apache . myfaces . view . facelets . tag . ObjectHandler # setAttributes ( javax . faces . view . facelets . FaceletContext , java . lang . Object ) */ public void setAttributes ( FaceletContext ctx , Object obj ) { } }
super . setAttributes ( ctx , obj ) ; NumberConverter c = ( NumberConverter ) obj ; if ( this . locale != null ) { c . setLocale ( ComponentSupport . getLocale ( ctx , this . locale ) ) ; }
public class EC2Instance {

    /**
     * Get the encrypted initial Windows password. This method only definitely works
     * with standard Amazon AMIs: http://aws.amazon.com/windows/amis/
     * Other AMIs in the public library may have had their password changed, and it
     * will not be retrievable on instances launched from those.
     *
     * @param instanceId the instance whose password is requested
     * @return the encrypted password, or null when unavailable
     * @throws InternalException on non-cloud failures
     * @throws CloudException    on provider-side failures
     */
    @Override
    public @Nullable String getPassword(@Nonnull String instanceId) throws InternalException, CloudException {
        APITrace.begin(getProvider(), "getPassword");
        try {
            return new GetPassCallable(instanceId, getProvider()).call();
        } catch (CloudException ce) {
            // Re-throw cloud failures untouched; only wrap everything else below.
            throw ce;
        } catch (Exception e) {
            throw new InternalException(e);
        } finally {
            // Always close the trace span, on success and on failure alike.
            APITrace.end();
        }
    }
}
public class TreeMutator {

    /**
     * Mutates the given chromosome.
     *
     * @see #mutate(Gene, Random)
     * @param chromosome the chromosome to mutate
     * @param p          the mutation probability for the underlying genetic objects
     * @param random     the random engine used for the genotype mutation
     * @return the mutation result
     */
    @Override
    protected MutatorResult<Chromosome<G>> mutate(final Chromosome<G> chromosome, final double p, final Random random) {
        // probability.toInt(p) maps p in [0,1] onto the int range so the Bernoulli
        // trial can be a single integer comparison against a raw nextInt() --
        // presumably the standard Jenetics integer-probability trick; confirm
        // against io.jenetics.internal.math.probability.
        final int P = probability.toInt(p);
        return random.nextInt() < P ? mutate(chromosome) : MutatorResult.of(chromosome);
    }
}
public class DescribeEntityAggregatesResult { /** * The number of entities that are affected by each of the specified events . * @ param entityAggregates * The number of entities that are affected by each of the specified events . */ public void setEntityAggregates ( java . util . Collection < EntityAggregate > entityAggregates ) { } }
if ( entityAggregates == null ) { this . entityAggregates = null ; return ; } this . entityAggregates = new java . util . ArrayList < EntityAggregate > ( entityAggregates ) ;
public class XAResourceRecoveryImpl { /** * Close a managed connection * @ param mc The managed connection */ private void close ( ManagedConnection mc ) { } }
log . debugf ( "Closing managed connection for recovery (%s)" , mc ) ; if ( mc != null ) { try { mc . cleanup ( ) ; } catch ( ResourceException ire ) { log . debugf ( "Error during recovery cleanup" , ire ) ; } } if ( mc != null ) { try { mc . destroy ( ) ; } catch ( ResourceException ire ) { log . debugf ( "Error during recovery destroy" , ire ) ; } } // The managed connection for recovery is now gone recoverMC = null ;
public class GeoFire { /** * Removes the location for a key from this GeoFire . * @ param key The key to remove from this GeoFire * @ param completionListener A completion listener that is called once the location is successfully removed * from the server or an error occurred */ public void removeLocation ( final String key , final CompletionListener completionListener ) { } }
if ( key == null ) { throw new NullPointerException ( ) ; } DatabaseReference keyRef = this . getDatabaseRefForKey ( key ) ; if ( completionListener != null ) { keyRef . setValue ( null , new DatabaseReference . CompletionListener ( ) { @ Override public void onComplete ( DatabaseError databaseError , DatabaseReference databaseReference ) { completionListener . onComplete ( key , databaseError ) ; } } ) ; } else { keyRef . setValue ( null ) ; }
public class ConditionalCheck { /** * Ensures that a passed boolean is not equal to another boolean . The comparison is made using * < code > expected = = check < / code > . * @ param condition * condition must be { @ code true } ^ so that the check will be performed * @ param expected * Expected value * @ param check * boolean to be checked * @ param message * an error message describing why the booleans must equal ( will be passed to * { @ code IllegalEqualException } ) * @ throws IllegalEqualException * if both argument values are not equal */ @ Throws ( IllegalEqualException . class ) public static void notEquals ( final boolean condition , final boolean expected , final boolean check , @ Nonnull final String message ) { } }
if ( condition ) { Check . notEquals ( expected , check , message ) ; }
public class NetTimeConnector { /** * Yields the current time once a connection has been established at least once. * If there has not yet been any connection, this method only reflects the time at which this clock was constructed. Note: this method is sensitive to any change of the local clock caused by the underlying operating system. If an application needs the Internet time directly, the following code can be used instead (causes a network connection): clock . connect ( ) ; Moment currentTime = clock . getLastConnectionTime ( ) ; * @ return Moment * @ see # isRunning ( ) */ @ Override public Moment currentTime ( ) { } }
// No successful connection yet: fall back to the moment captured at construction time.
// Otherwise extrapolate from the last connection result: take the monotonic local clock,
// apply the measured offset, and add the elapsed delta (micros -> nanos) to the last moment.
final ConnectionResult cr = this . result ; if ( cr == null ) { return this . startMoment ; } long localMicros = SystemClock . MONOTONIC . realTimeInMicros ( ) ; long amount = localMicros + cr . getActualOffset ( localMicros ) - extractMicros ( cr . lastMoment ) ; return cr . lastMoment . plus ( amount * 1000 , SI . NANOSECONDS ) ;
public class NtFileNodeRepresentation { /** * ( non - Javadoc ) * Collects the named property from both the jcr:content child (if present) and the node itself, * wrapping each value in a HierarchicalProperty tagged with the property's namespace. * NOTE(review): returns null (not an empty list) for jcr:primaryType / jcr:mixinTypes — callers * presumably rely on that; confirm before changing. A missing property (PathNotFoundException) * is treated as "no such property" and only traced. * @ see * org . exoplatform . services . jcr . ext . resource . NodeRepresentation # getProperties ( java . lang . String ) */ public Collection < HierarchicalProperty > getProperties ( String name ) throws RepositoryException { } }
// Multi-valued properties contribute one HierarchicalProperty per value; single-valued ones just one.
ArrayList < HierarchicalProperty > props = new ArrayList < HierarchicalProperty > ( ) ; if ( "jcr:primaryType" . equals ( name ) || "jcr:mixinTypes" . equals ( name ) ) return null ; if ( content != null && content . getProperty ( name ) != null ) { props . addAll ( content . getProperties ( name ) ) ; } try { String ns = ( ( ExtendedSession ) node . getSession ( ) ) . getLocationFactory ( ) . parseJCRName ( name ) . getNamespace ( ) ; Property p = node . getProperty ( name ) ; if ( p . getDefinition ( ) . isMultiple ( ) ) { Value [ ] v = p . getValues ( ) ; for ( int i = 0 ; i < v . length ; i ++ ) { props . add ( new HierarchicalProperty ( name , v [ i ] . getString ( ) , ns ) ) ; } } else { props . add ( new HierarchicalProperty ( name , p . getString ( ) , ns ) ) ; } // PropertyIterator iter = node . getProperties ( name ) ; // while ( iter . hasNext ( ) ) { // Property prop = iter . nextProperty ( ) ; // props . add ( new HierarchicalProperty ( name , prop . getString ( ) , ns ) ) ; } catch ( PathNotFoundException e ) { if ( LOG . isTraceEnabled ( ) ) { LOG . trace ( "An exception occurred: " + e . getMessage ( ) ) ; } } return props ;
public class TaskAddHeaders { /** * Set the time at which the resource was last modified . * @ param lastModified the lastModified value to set * @ return the TaskAddHeaders object itself . */ public TaskAddHeaders withLastModified ( DateTime lastModified ) { } }
if ( lastModified == null ) { this . lastModified = null ; } else { this . lastModified = new DateTimeRfc1123 ( lastModified ) ; } return this ;
public class UnicodeSet { /** * Removes the specified string from this set if it is present . * The set will not contain the specified string once the call * returns . * @ param s the string to be removed * @ return this object , for chaining */ public final UnicodeSet remove ( CharSequence s ) { } }
int cp = getSingleCP ( s ) ; if ( cp < 0 ) { strings . remove ( s . toString ( ) ) ; pat = null ; } else { remove ( cp , cp ) ; } return this ;
public class ClassName { /** * Returns the package component of { @ code pathName } . * @ param pathName a dot - separated path */ @ Requires ( "pathName != null" ) @ Ensures ( "result != null" ) public static String getPackageName ( String pathName ) { } }
int lastSep = pathName . lastIndexOf ( '.' ) ; if ( lastSep == - 1 ) { return "" ; } else { return pathName . substring ( 0 , lastSep ) ; }
// Builds and initializes a KafkaClient from the configured bootstrap server list.
// Throws IllegalStateException when no bootstrap servers are configured.
public class KafkaQueue { protected KafkaClient buildKafkaClient ( ) throws Exception { } }
if ( StringUtils . isBlank ( bootstrapServers ) ) { throw new IllegalStateException ( "Kafka bootstrap server list is not defined." ) ; } KafkaClient kafkaClient = new KafkaClient ( bootstrapServers ) ; /* NOTE(review): consumerProps is passed to BOTH setProducerProperties and setConsumerProperties — this looks like a copy-paste bug; confirm whether a producerProps field was intended for the producer side. */ kafkaClient . setProducerProperties ( consumerProps ) . setConsumerProperties ( consumerProps ) ; kafkaClient . init ( ) ; return kafkaClient ;
public class LinkPredicates { /** * Returns a Predicate that is matching links having the specified type { @ link Link # getType ( ) type } * @ param type the expected media type of the link * @ return Predicate used to select links */ public static Predicate < Link > havingType ( final String type ) { } }
return link -> type . equals ( link . getType ( ) ) ;
public class BceHttpClient { /** * Create asynchronous http client based on connection manager . * @ param connectionManager Asynchronous http client connection manager . * @ return Asynchronous http client based on connection manager . */ protected CloseableHttpAsyncClient createHttpAsyncClient ( NHttpClientConnectionManager connectionManager ) { } }
HttpAsyncClientBuilder builder = HttpAsyncClients . custom ( ) . setConnectionManager ( connectionManager ) ; int socketBufferSizeInBytes = this . config . getSocketBufferSizeInBytes ( ) ; if ( socketBufferSizeInBytes > 0 ) { builder . setDefaultConnectionConfig ( ConnectionConfig . custom ( ) . setBufferSize ( socketBufferSizeInBytes ) . build ( ) ) ; } return builder . build ( ) ;
public class ChannelOutboundBuffer { /** * Returns an array of direct NIO buffers if the currently pending messages are made of { @ link ByteBuf } only . * { @ link # nioBufferCount ( ) } and { @ link # nioBufferSize ( ) } will return the number of NIO buffers in the returned * array and the total number of readable bytes of the NIO buffers respectively . * Note that the returned array is reused and thus should not escape * { @ link AbstractChannel # doWrite ( ChannelOutboundBuffer ) } . * Refer to { @ link NioSocketChannel # doWrite ( ChannelOutboundBuffer ) } for an example . * @ param maxCount The maximum amount of buffers that will be added to the return value . * @ param maxBytes A hint toward the maximum number of bytes to include as part of the return value . Note that this * value maybe exceeded because we make a best effort to include at least 1 { @ link ByteBuffer } * in the return value to ensure write progress is made . */ public ByteBuffer [ ] nioBuffers ( int maxCount , long maxBytes ) { } }
// Implementation notes (review): walks the flushed-entry chain, converting ByteBuf messages into
// NIO ByteBuffers until either maxCount buffers are collected or adding the next entry would push
// the byte total past maxBytes (at least one buffer is always included so write progress is made).
// The destination array comes from an InternalThreadLocalMap cache and is reused across calls,
// which is why the javadoc forbids letting it escape. Totals are published to the instance fields
// nioBufferCount / nioBufferSize at the end.
assert maxCount > 0 ; assert maxBytes > 0 ; long nioBufferSize = 0 ; int nioBufferCount = 0 ; final InternalThreadLocalMap threadLocalMap = InternalThreadLocalMap . get ( ) ; ByteBuffer [ ] nioBuffers = NIO_BUFFERS . get ( threadLocalMap ) ; Entry entry = flushedEntry ; while ( isFlushedEntry ( entry ) && entry . msg instanceof ByteBuf ) { if ( ! entry . cancelled ) { ByteBuf buf = ( ByteBuf ) entry . msg ; final int readerIndex = buf . readerIndex ( ) ; final int readableBytes = buf . writerIndex ( ) - readerIndex ; if ( readableBytes > 0 ) { if ( maxBytes - readableBytes < nioBufferSize && nioBufferCount != 0 ) { // If the nioBufferSize + readableBytes will overflow maxBytes , and there is at least one entry // we stop populate the ByteBuffer array . This is done for 2 reasons : // 1 . bsd / osx don ' t allow to write more bytes then Integer . MAX _ VALUE with one writev ( . . . ) call // and so will return ' EINVAL ' , which will raise an IOException . On Linux it may work depending // on the architecture and kernel but to be safe we also enforce the limit here . // 2 . There is no sense in putting more data in the array than is likely to be accepted by the // OS . // See also : // - https : / / www . freebsd . org / cgi / man . cgi ? query = write & sektion = 2 // - http : / / linux . die . net / man / 2 / writev break ; } nioBufferSize += readableBytes ; int count = entry . count ; if ( count == - 1 ) { // noinspection ConstantValueVariableUse entry . count = count = buf . nioBufferCount ( ) ; } int neededSpace = min ( maxCount , nioBufferCount + count ) ; if ( neededSpace > nioBuffers . length ) { nioBuffers = expandNioBufferArray ( nioBuffers , neededSpace , nioBufferCount ) ; NIO_BUFFERS . set ( threadLocalMap , nioBuffers ) ; } if ( count == 1 ) { ByteBuffer nioBuf = entry . buf ; if ( nioBuf == null ) { // cache ByteBuffer as it may need to create a new ByteBuffer instance if its a // derived buffer entry . buf = nioBuf = buf . 
internalNioBuffer ( readerIndex , readableBytes ) ; } nioBuffers [ nioBufferCount ++ ] = nioBuf ; } else { // The code exists in an extra method to ensure the method is not too big to inline as this // branch is not very likely to get hit very frequently . nioBufferCount = nioBuffers ( entry , buf , nioBuffers , nioBufferCount , maxCount ) ; } if ( nioBufferCount == maxCount ) { break ; } } } entry = entry . next ; } this . nioBufferCount = nioBufferCount ; this . nioBufferSize = nioBufferSize ; return nioBuffers ;
public class VersionChecker { /** * Consumes the version field from the given input and raises an exception if the record is in a newer version , * written by a newer version of Hibernate OGM . * @ param input the input to read from * @ param supportedVersion the type version supported by this version of OGM * @ param externalizedType the type to be unmarshalled * @ throws IOException if an error occurs while reading the input */ public static void readAndCheckVersion ( ObjectInput input , int supportedVersion , Class < ? > externalizedType ) throws IOException { } }
int version = input . readInt ( ) ; if ( version != supportedVersion ) { throw LOG . unexpectedKeyVersion ( externalizedType , version , supportedVersion ) ; }
public class CompoundEvent { /** * Commits this transaction by posting this event to the distributed object event queue . All * participating dobjects will have their transaction references cleared and will go back to * normal operation . */ public void commit ( ) { } }
// first clear our target clearTarget ( ) ; // then post this event onto the queue ( but only if we actually // accumulated some events ) int size = _events . size ( ) ; switch ( size ) { case 0 : // nothing doing break ; case 1 : // no point in being compound _omgr . postEvent ( _events . get ( 0 ) ) ; break ; default : // now we ' re talking _transport = _events . get ( 0 ) . getTransport ( ) ; for ( int ii = 1 ; ii < size ; ii ++ ) { _transport = _events . get ( ii ) . getTransport ( ) . combine ( _transport ) ; } _omgr . postEvent ( this ) ; break ; }
public class DeploymentScenario { /** * Get all { @ link DeploymentDescription } defined to be deployed during Test startup for a specific { @ link * TargetDescription } ordered . * @ param target * The Target to filter on * @ return A List of found { @ link DeploymentDescription } . Will return a empty list if none are found . */ public List < Deployment > startupDeploymentsFor ( TargetDescription target ) { } }
Validate . notNull ( target , "Target must be specified" ) ; List < Deployment > startupDeployments = new ArrayList < Deployment > ( ) ; for ( Deployment deployment : deployments ) { DeploymentDescription desc = deployment . getDescription ( ) ; if ( desc . managed ( ) && target . equals ( desc . getTarget ( ) ) ) { startupDeployments . add ( deployment ) ; } } // sort them by order Collections . sort ( startupDeployments , new Comparator < Deployment > ( ) { public int compare ( Deployment o1 , Deployment o2 ) { return new Integer ( o1 . getDescription ( ) . getOrder ( ) ) . compareTo ( o2 . getDescription ( ) . getOrder ( ) ) ; } } ) ; return Collections . unmodifiableList ( startupDeployments ) ;
public class AudienceSegmentCriteria { /** * Sets the operator value for this AudienceSegmentCriteria . * @ param operator * The comparison operator . This attribute is required . */ public void setOperator ( com . google . api . ads . admanager . axis . v201811 . AudienceSegmentCriteriaComparisonOperator operator ) { } }
// Plain setter: no null/validity check is performed here even though the javadoc
// marks the attribute as required.
this . operator = operator ;
// NOTE: ANTLR-generated parser code (from grammar InternalXtext.g); do not hand-edit
// the control flow — regenerate from the grammar instead.
public class InternalXtextParser { /** * InternalXtext . g : 2170:1 : rulePredicatedKeyword returns [ EObject current = null ] : ( ( ( ( lv _ predicated _ 0_0 = ' = > ' ) ) | ( ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) ) ) ( ( lv _ value _ 2_0 = RULE _ STRING ) ) ) ; */ public final EObject rulePredicatedKeyword ( ) throws RecognitionException { } }
// Parses a predicated keyword: either '=>' (predicated) or '->' (first-set predicated),
// followed by a STRING literal for the keyword value. Token types 42/43 correspond to the
// two predicate operators; on any other lookahead a NoViableAltException is raised.
EObject current = null ; Token lv_predicated_0_0 = null ; Token lv_firstSetPredicated_1_0 = null ; Token lv_value_2_0 = null ; enterRule ( ) ; try { // InternalXtext . g : 2176:2 : ( ( ( ( ( lv _ predicated _ 0_0 = ' = > ' ) ) | ( ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) ) ) ( ( lv _ value _ 2_0 = RULE _ STRING ) ) ) ) // InternalXtext . g : 2177:2 : ( ( ( ( lv _ predicated _ 0_0 = ' = > ' ) ) | ( ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) ) ) ( ( lv _ value _ 2_0 = RULE _ STRING ) ) ) { // InternalXtext . g : 2177:2 : ( ( ( ( lv _ predicated _ 0_0 = ' = > ' ) ) | ( ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) ) ) ( ( lv _ value _ 2_0 = RULE _ STRING ) ) ) // InternalXtext . g : 2178:3 : ( ( ( lv _ predicated _ 0_0 = ' = > ' ) ) | ( ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) ) ) ( ( lv _ value _ 2_0 = RULE _ STRING ) ) { // InternalXtext . g : 2178:3 : ( ( ( lv _ predicated _ 0_0 = ' = > ' ) ) | ( ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) ) ) int alt50 = 2 ; int LA50_0 = input . LA ( 1 ) ; if ( ( LA50_0 == 42 ) ) { alt50 = 1 ; } else if ( ( LA50_0 == 43 ) ) { alt50 = 2 ; } else { NoViableAltException nvae = new NoViableAltException ( "" , 50 , 0 , input ) ; throw nvae ; } switch ( alt50 ) { case 1 : // InternalXtext . g : 2179:4 : ( ( lv _ predicated _ 0_0 = ' = > ' ) ) { // InternalXtext . g : 2179:4 : ( ( lv _ predicated _ 0_0 = ' = > ' ) ) // InternalXtext . g : 2180:5 : ( lv _ predicated _ 0_0 = ' = > ' ) { // InternalXtext . g : 2180:5 : ( lv _ predicated _ 0_0 = ' = > ' ) // InternalXtext . g : 2181:6 : lv _ predicated _ 0_0 = ' = > ' { lv_predicated_0_0 = ( Token ) match ( input , 42 , FollowSets000 . FOLLOW_11 ) ; newLeafNode ( lv_predicated_0_0 , grammarAccess . getPredicatedKeywordAccess ( ) . getPredicatedEqualsSignGreaterThanSignKeyword_0_0_0 ( ) ) ; if ( current == null ) { current = createModelElement ( grammarAccess . 
getPredicatedKeywordRule ( ) ) ; } setWithLastConsumed ( current , "predicated" , true , "=>" ) ; } } } break ; case 2 : // InternalXtext . g : 2194:4 : ( ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) ) { // InternalXtext . g : 2194:4 : ( ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) ) // InternalXtext . g : 2195:5 : ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) { // InternalXtext . g : 2195:5 : ( lv _ firstSetPredicated _ 1_0 = ' - > ' ) // InternalXtext . g : 2196:6 : lv _ firstSetPredicated _ 1_0 = ' - > ' { lv_firstSetPredicated_1_0 = ( Token ) match ( input , 43 , FollowSets000 . FOLLOW_11 ) ; newLeafNode ( lv_firstSetPredicated_1_0 , grammarAccess . getPredicatedKeywordAccess ( ) . getFirstSetPredicatedHyphenMinusGreaterThanSignKeyword_0_1_0 ( ) ) ; if ( current == null ) { current = createModelElement ( grammarAccess . getPredicatedKeywordRule ( ) ) ; } setWithLastConsumed ( current , "firstSetPredicated" , true , "->" ) ; } } } break ; } // InternalXtext . g : 2209:3 : ( ( lv _ value _ 2_0 = RULE _ STRING ) ) // InternalXtext . g : 2210:4 : ( lv _ value _ 2_0 = RULE _ STRING ) { // InternalXtext . g : 2210:4 : ( lv _ value _ 2_0 = RULE _ STRING ) // InternalXtext . g : 2211:5 : lv _ value _ 2_0 = RULE _ STRING { lv_value_2_0 = ( Token ) match ( input , RULE_STRING , FollowSets000 . FOLLOW_2 ) ; newLeafNode ( lv_value_2_0 , grammarAccess . getPredicatedKeywordAccess ( ) . getValueSTRINGTerminalRuleCall_1_0 ( ) ) ; if ( current == null ) { current = createModelElement ( grammarAccess . getPredicatedKeywordRule ( ) ) ; } setWithLastConsumed ( current , "value" , lv_value_2_0 , "org.eclipse.xtext.common.Terminals.STRING" ) ; } } } } leaveRule ( ) ; } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class ScopeContext { /** * return the session count of this application context * @ return */ public int getAppContextSessionCount ( PageContext pc ) { } }
ApplicationContext appContext = pc . getApplicationContext ( ) ; if ( pc . getSessionType ( ) == Config . SESSION_TYPE_JEE ) return 0 ; Map < String , Scope > context = getSubMap ( cfSessionContexts , appContext . getName ( ) ) ; return getCount ( context ) ;
public class HdfsOutputSwitcher { /** * メッセージをHDFSに出力する 。 * @ param target 出力内容 * @ param nowTime 出力時刻 * @ throws IOException 入出力エラー発生時 */ public void append ( String target , long nowTime ) throws IOException { } }
if ( this . nextSwitchTime <= nowTime ) { switchWriter ( nowTime ) ; } this . currentWriter . append ( target ) ;
public class Router { /** * 记录路由路径记录 * @ param routerName 路由名字 * @ since 5.2.0 */ protected void recordRouterWay ( String routerName ) { } }
if ( RpcInternalContext . isAttachmentEnable ( ) ) { RpcInternalContext context = RpcInternalContext . getContext ( ) ; String record = ( String ) context . getAttachment ( RpcConstants . INTERNAL_KEY_ROUTER_RECORD ) ; record = record == null ? routerName : record + ">" + routerName ; context . setAttachment ( RpcConstants . INTERNAL_KEY_ROUTER_RECORD , record ) ; }
public class Fingerprint { /** * Parses the input status and returns a tag map . * @ param ufsName the name of the ufs , should be { @ link UnderFileSystem # getUnderFSType ( ) } * @ param status the { @ link UfsStatus } to create the tagmap from * @ return the tag map object */ private static Map < Tag , String > createTags ( String ufsName , UfsStatus status ) { } }
Map < Tag , String > tagMap = new HashMap < > ( ) ; tagMap . put ( Tag . UFS , ufsName ) ; tagMap . put ( Tag . OWNER , status . getOwner ( ) ) ; tagMap . put ( Tag . GROUP , status . getGroup ( ) ) ; tagMap . put ( Tag . MODE , String . valueOf ( status . getMode ( ) ) ) ; if ( status instanceof UfsFileStatus ) { tagMap . put ( Tag . TYPE , Type . FILE . name ( ) ) ; tagMap . put ( Tag . CONTENT_HASH , ( ( UfsFileStatus ) status ) . getContentHash ( ) ) ; } else { tagMap . put ( Tag . TYPE , Type . DIRECTORY . name ( ) ) ; } return tagMap ;
public class FilterStreamXMLStreamWriter { /** * Write the XML Declaration . * @ param encoding the XML encoding * @ param version the XML version * @ throws FilterException when the underlying XMLStreamWriter fails */ public void writeStartDocument ( String encoding , String version ) throws FilterException { } }
// Delegates to XMLStreamWriter.writeStartDocument(encoding, version), translating the
// checked XMLStreamException into the filter module's FilterException.
try { this . writer . writeStartDocument ( encoding , version ) ; } catch ( XMLStreamException e ) { throw new FilterException ( "Failed to write start document" , e ) ; }
public class CompletableFuture { /** * Completes with the given ( non - null ) exceptional result as a wrapped CompletionException unless * it is one already , unless already completed . May complete with the given Object r ( which must * have been the result of a source future ) if it is equivalent , i . e . if this is a simple * propagation of an existing CompletionException . */ final boolean completeThrowable ( Throwable x , Object r ) { } }
// Single CAS from null ensures at-most-once completion; encodeThrowable(x, r) produces the
// encoded (possibly CompletionException-wrapped) result described in the javadoc above.
return U . compareAndSwapObject ( this , RESULT , null , encodeThrowable ( x , r ) ) ;
public class ClosureCodingConvention { /** * Extracts X from goog . provide ( ' X ' ) , if the applied Node is goog . * @ return The extracted class name , or null . */ @ Override public String extractClassNameIfProvide ( Node node , Node parent ) { } }
String namespace = extractClassNameIfGoog ( node , parent , "goog.provide" ) ; if ( namespace == null ) { namespace = extractClassNameIfGoog ( node , parent , "goog.module" ) ; } return namespace ;
public class ArrayIndexOutOfBounds { /** * overrides the visitor to collect parameter registers * @ param obj * the code block of the currently parsed method */ @ Override public void visitCode ( Code obj ) { } }
// Resets the per-method state, marks the registers holding the method parameters as initialized
// (slot 0 is 'this' for instance methods; wide types occupy the extra slot via getSignatureSize),
// then walks the bytecode via super.visitCode. Any recorded array-store-to-null locations are
// reported as AIOB_ARRAY_STORE_TO_NULL_REFERENCE bugs afterwards.
Method m = getMethod ( ) ; stack . resetForMethodEntry ( this ) ; initializedRegs . clear ( ) ; modifyRegs . clear ( ) ; Type [ ] argTypes = m . getArgumentTypes ( ) ; int arg = m . isStatic ( ) ? 0 : 1 ; for ( Type argType : argTypes ) { String argSig = argType . getSignature ( ) ; initializedRegs . set ( arg ) ; arg += SignatureUtils . getSignatureSize ( argSig ) ; } nullStoreToLocation . clear ( ) ; super . visitCode ( obj ) ; for ( Integer pc : nullStoreToLocation . values ( ) ) { bugReporter . reportBug ( new BugInstance ( this , BugType . AIOB_ARRAY_STORE_TO_NULL_REFERENCE . name ( ) , HIGH_PRIORITY ) . addClass ( this ) . addMethod ( this ) . addSourceLine ( this , pc . intValue ( ) ) ) ; }
public class Command { /** * Extract a double array from a CORBA Any object . * @ param in The CORBA Any object * @ return The extracted double array * @ exception DevFailed If the Any object does not contains a data of the * waited type . * Click < a href = " . . / . . / tango _ basic / idl _ html / Tango . html # DevFailed " > here < / a > to read * < b > DevFailed < / b > exception specification */ public double [ ] extract_DevVarDoubleArray ( Any in ) throws DevFailed { } }
double [ ] data = null ; try { data = DevVarDoubleArrayHelper . extract ( in ) ; } catch ( BAD_OPERATION ex ) { throw_bad_type ( "DevVarDoubleArray" ) ; } return data ;
public class ImageStatistics { /** * Returns the sum of all the pixels in the image . * @ param img Input image . Not modified . */ public static int sum ( GrayS16 img ) { } }
if ( BoofConcurrency . USE_CONCURRENT ) { return ImplImageStatistics_MT . sum ( img ) ; } else { return ImplImageStatistics . sum ( img ) ; }
public class ApplicationGatewaysInner { /** * Creates or updates the specified application gateway . * @ param resourceGroupName The name of the resource group . * @ param applicationGatewayName The name of the application gateway . * @ param parameters Parameters supplied to the create or update application gateway operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ApplicationGatewayInner object if successful . */ public ApplicationGatewayInner beginCreateOrUpdate ( String resourceGroupName , String applicationGatewayName , ApplicationGatewayInner parameters ) { } }
// Blocks the calling thread on the async variant and unwraps the single service response body.
return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , applicationGatewayName , parameters ) . toBlocking ( ) . single ( ) . body ( ) ;
public class policyhttpcallout { /** * Use this API to fetch all the policyhttpcallout resources that are configured on netscaler . */ public static policyhttpcallout [ ] get ( nitro_service service ) throws Exception { } }
policyhttpcallout obj = new policyhttpcallout ( ) ; policyhttpcallout [ ] response = ( policyhttpcallout [ ] ) obj . get_resources ( service ) ; return response ;
public class RecastArea { /** * Marks all walkable spans of the compact heightfield whose cell centers fall inside the given vertical cylinder (base center pos, radius r, height h), applying areaMod to their area id. Spans already marked RC_NULL_AREA are left untouched. * @ see rcCompactHeightfield , rcMedianFilterWalkableArea */ public void markCylinderArea ( Context ctx , float [ ] pos , float r , float h , AreaModification areaMod , CompactHeightfield chf ) { } }
// Computes the cylinder's AABB, converts it to grid coordinates (clamped to the heightfield),
// then for every span in the covered cells checks the vertical range and the squared horizontal
// distance of the cell center against r^2 before applying the area modification.
ctx . startTimer ( "MARK_CYLINDER_AREA" ) ; float bmin [ ] = new float [ 3 ] , bmax [ ] = new float [ 3 ] ; bmin [ 0 ] = pos [ 0 ] - r ; bmin [ 1 ] = pos [ 1 ] ; bmin [ 2 ] = pos [ 2 ] - r ; bmax [ 0 ] = pos [ 0 ] + r ; bmax [ 1 ] = pos [ 1 ] + h ; bmax [ 2 ] = pos [ 2 ] + r ; float r2 = r * r ; int minx = ( int ) ( ( bmin [ 0 ] - chf . bmin [ 0 ] ) / chf . cs ) ; int miny = ( int ) ( ( bmin [ 1 ] - chf . bmin [ 1 ] ) / chf . ch ) ; int minz = ( int ) ( ( bmin [ 2 ] - chf . bmin [ 2 ] ) / chf . cs ) ; int maxx = ( int ) ( ( bmax [ 0 ] - chf . bmin [ 0 ] ) / chf . cs ) ; int maxy = ( int ) ( ( bmax [ 1 ] - chf . bmin [ 1 ] ) / chf . ch ) ; int maxz = ( int ) ( ( bmax [ 2 ] - chf . bmin [ 2 ] ) / chf . cs ) ; if ( maxx < 0 ) return ; if ( minx >= chf . width ) return ; if ( maxz < 0 ) return ; if ( minz >= chf . height ) return ; if ( minx < 0 ) minx = 0 ; if ( maxx >= chf . width ) maxx = chf . width - 1 ; if ( minz < 0 ) minz = 0 ; if ( maxz >= chf . height ) maxz = chf . height - 1 ; for ( int z = minz ; z <= maxz ; ++ z ) { for ( int x = minx ; x <= maxx ; ++ x ) { CompactCell c = chf . cells [ x + z * chf . width ] ; for ( int i = c . index , ni = c . index + c . count ; i < ni ; ++ i ) { CompactSpan s = chf . spans [ i ] ; if ( chf . areas [ i ] == RC_NULL_AREA ) continue ; if ( s . y >= miny && s . y <= maxy ) { float sx = chf . bmin [ 0 ] + ( x + 0.5f ) * chf . cs ; float sz = chf . bmin [ 2 ] + ( z + 0.5f ) * chf . cs ; float dx = sx - pos [ 0 ] ; float dz = sz - pos [ 2 ] ; if ( dx * dx + dz * dz < r2 ) { chf . areas [ i ] = areaMod . apply ( chf . areas [ i ] ) ; } } } } } ctx . stopTimer ( "MARK_CYLINDER_AREA" ) ;
public class IntervalTaggerWrapper { /** * Invokes the IntervalTagger ' s process method on the given CAS . */ public void process ( JCas jcas ) { } }
// NOTE(review): processing failures are swallowed here — the exception is only printed to
// stderr and the caller never learns the CAS was not processed. Confirm whether this
// best-effort behavior is intentional before changing it to propagate.
try { tagger . process ( jcas ) ; } catch ( AnalysisEngineProcessException e ) { e . printStackTrace ( ) ; }