signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class IntCounter { /** * Finds and returns the key in this Counter with the largest count . * Ties are broken by comparing the objects using the given tie breaking * Comparator , favoring Objects that are sorted to the front . This is useful * if the keys are numeric and there is a bias to prefer smaller or larger * values , and can be useful in other circumstances where random tie - breaking * is not desirable . Returns null if this Counter is empty . */ public E argmax ( Comparator < E > tieBreaker ) { } }
int max = Integer . MIN_VALUE ; E argmax = null ; for ( E key : keySet ( ) ) { int count = getIntCount ( key ) ; if ( argmax == null || count > max || ( count == max && tieBreaker . compare ( key , argmax ) < 0 ) ) { max = count ; argmax = key ; } } return argmax ;
public class CacheManager { /** * Handle the purge cache option . */ protected void handlePurgeCache ( ) { } }
reportable . output ( "Purging all resources from the cache" ) ; try { cacheMgrService . purgeResources ( ) ; } catch ( Exception e ) { reportable . error ( "Unable to remove all resources in the cache" ) ; reportable . error ( "Reason: " + e . getMessage ( ) ) ; bail ( GENERAL_FAILURE ) ; }
public class RowIndex { /** * cleans up deleted columns from cassandra cleanup compaction * @ param key The partition key of the physical row to be deleted . */ @ Override public void delete ( DecoratedKey key , OpOrder . Group opGroup ) { } }
Log . debug ( "Removing row %s from index %s" , key , logName ) ; lock . writeLock ( ) . lock ( ) ; try { rowService . delete ( key ) ; rowService = null ; } catch ( RuntimeException e ) { Log . error ( e , "Error deleting row %s" , key ) ; throw e ; } finally { lock . writeLock ( ) . unlock ( ) ; }
public class Histogram {
    /**
     * Cleans up an arbitrary collection of ranges.
     * Ranges are split at their intersection points; ranges with a count of
     * zero are omitted from the result.
     *
     * @param imed An arbitrary collection of ranges. The operations on this
     *        list are destructive (elements are removed and mutated in place).
     * @return An ordered list of ranges, none of which intersect each other.
     */
    private static List<RangeWithCount> cleanup_(List<RangeWithCount> imed) {
        // Order by floor first, then by ceil, so the head of the list is always
        // the left-most (and, among equal floors, the shortest) range.
        final Comparator<RangeWithCount> cmp = Comparator.comparing((RangeWithCount range_count) -> range_count.getRange().getFloor()).thenComparing(Comparator.comparing((RangeWithCount range_count) -> range_count.getRange().getCeil()));
        final List<RangeWithCount> result = new ArrayList<>(imed.size());
        sort(imed, cmp);
        while (imed.size() >= 2) {
            final RangeWithCount head = imed.remove(0);
            final RangeWithCount succ = imed.get(0);
            // Case 1: identical ranges — merge their counts into succ.
            if (head.getRange().equals(succ.getRange())) {
                succ.setCount(succ.getCount() + head.getCount());
                continue;
            }
            // Case 2: same floor, succ extends further — move succ's count mass
            // proportionally into head over [floor, mid) and shrink succ to [mid, ceil).
            if (head.getRange().getFloor() == succ.getRange().getFloor()) {
                final double mid = head.getRange().getCeil();
                final double ceil = succ.getRange().getCeil();
                final double succ_range = succ.getRange().getWidth();
                final double succ_left_fraction = (mid - succ.getRange().getFloor()) / succ_range;
                final double succ_right_fraction = 1 - succ_left_fraction;
                head.setCount(head.getCount() + succ_left_fraction * succ.getCount());
                succ.setCount(succ_right_fraction * succ.getCount());
                succ.setRange(new Range(mid, ceil));
                imed.add(0, head);
                sort(imed, cmp);
                continue;
            }
            // Case 3: head is entirely left of succ — emit it (unless zero count).
            if (head.getRange().getCeil() <= succ.getRange().getFloor()) {
                if (Math.signum(head.getCount()) != 0)
                    result.add(head);
                continue;
            }
            // Remaining case: head.floor < succ.floor < head.ceil.
            assert (head.getRange().getFloor() < succ.getRange().getFloor());
            assert (succ.getRange().getFloor() < head.getRange().getCeil());
            // Head is intersected by succ: split head in two at the succ.floor
            // boundary, distributing its count proportionally to the widths.
            final double floor = head.getRange().getFloor();
            final double ceil = succ.getRange().getFloor();
            final double head_range = head.getRange().getWidth();
            final double head_left_fraction = (ceil - floor) / head_range;
            final double head_right_fraction = 1 - head_left_fraction;
            // Note: the new left piece is created BEFORE head is mutated, so it
            // uses head's original count; order matters here.
            imed.add(0, head);
            imed.add(0, new RangeWithCount(new Range(floor, ceil), head_left_fraction * head.getCount()));
            head.setRange(new Range(ceil, head.getRange().getCeil()));
            head.setCount(head_right_fraction * head.getCount());
            sort(imed, cmp);
        }
        // Emit the final remaining range(s), skipping zero counts.
        imed.stream().filter(rwc -> Math.signum(rwc.getCount()) != 0).forEach(result::add);
        // Merge adjacent entries when they have the same count density
        // (cross-multiplied to avoid division).
        for (int i = 0; i < result.size() - 1; ) {
            final RangeWithCount pred = result.get(i);
            final RangeWithCount succ = result.get(i + 1);
            final double pred_range = pred.getRange().getWidth();
            final double succ_range = succ.getRange().getWidth();
            if (pred.getRange().getCeil() == succ.getRange().getFloor() && pred.getCount() * succ_range == succ.getCount() * pred_range) {
                result.remove(i);
                succ.setRange(new Range(pred.getRange().getFloor(), succ.getRange().getCeil()));
                succ.setCount(succ.getCount() + pred.getCount());
            } else {
                ++i;
            }
        }
        return result;
    }
}
public class MockResponse {
    /**
     * Adds a new header with the name and value. This may be used to add multiple headers with the
     * same name. Unlike {@link #addHeader(String, Object)} this does not validate the name and
     * value.
     *
     * @return this response, for call chaining
     */
    public MockResponse addHeaderLenient(String name, Object value) {
        // Delegates to the Kotlin helper; the value is stringified with
        // String.valueOf, so a null value becomes the literal "null".
        InternalKtKt.addHeaderLenient(headers, name, String.valueOf(value));
        return this;
    }
}
public class OnPremisesTagSetMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param onPremisesTagSet the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write into
     * @throws SdkClientException when the argument is null or marshalling fails
     */
    public void marshall(OnPremisesTagSet onPremisesTagSet, ProtocolMarshaller protocolMarshaller) {
        if (onPremisesTagSet == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the tag-set list field is serialized for this shape.
            protocolMarshaller.marshall(onPremisesTagSet.getOnPremisesTagSetList(), ONPREMISESTAGSETLIST_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GeoShapeBase { /** * This is used to fix Kryo serialization issues with lists generated from methods such as * { @ link java . util . Arrays # asList ( Object [ ] ) } */ protected < T > List < T > toArrayList ( List < T > list ) { } }
if ( list == null ) { return null ; } if ( ! list . getClass ( ) . equals ( ArrayList . class ) ) { list = new ArrayList < > ( list ) ; } return list ;
public class NioGroovyMethods {
    /**
     * Create a new ObjectInputStream for this file associated with the given class loader
     * and pass it to the closure. This method ensures the stream is closed after the
     * closure returns.
     *
     * @param self a Path
     * @param classLoader the class loader to use when loading the class
     * @param closure a closure
     * @return the value returned by the closure
     * @throws java.io.IOException if an IOException occurs.
     * @see org.codehaus.groovy.runtime.IOGroovyMethods#withStream(java.io.InputStream, groovy.lang.Closure)
     * @since 2.3.0
     */
    public static <T> T withObjectInputStream(Path self, ClassLoader classLoader, @ClosureParams(value = SimpleType.class, options = "java.io.ObjectInputStream") Closure<T> closure) throws IOException {
        // withStream closes the stream after invoking the closure, even on exception.
        return IOGroovyMethods.withStream(newObjectInputStream(self, classLoader), closure);
    }
}
public class HttpClient { /** * DELETE requests */ public HttpResponse delete ( String path ) throws APIException { } }
final HttpDelete request = new HttpDelete ( getUrl ( path ) ) ; return execute ( request ) ;
public class CosineTextSimilarity {
    /**
     * Scores similarity as the cosine of the angle between two term-frequency
     * vectors. For vectors a=(x1,y1), b=(x2,y2):
     * similarity = a.b / (|a|*|b|), where a.b = x1*x2 + y1*y2,
     * |a| = sqrt(x1^2 + y1^2) and |b| = sqrt(x2^2 + y2^2).
     *
     * @param words1 first word list
     * @param words2 second word list
     * @return similarity score; 0 when either vector has no weight
     */
    @Override
    protected double scoreImpl(List<Word> words1, List<Word> words2) {
        // Weight each word by its term frequency.
        taggingWeightWithWordFrequency(words1, words2);
        // Build fast lookup maps for the weights.
        Map<String, Float> weights1 = toFastSearchMap(words1);
        Map<String, Float> weights2 = toFastSearchMap(words2);
        // Union of all distinct words; each word is one vector dimension.
        Set<Word> words = new HashSet<>();
        words.addAll(words1);
        words.addAll(words2);
        AtomicFloat ab = new AtomicFloat();  // a.b
        AtomicFloat aa = new AtomicFloat();  // |a|^2
        AtomicFloat bb = new AtomicFloat();  // |b|^2
        // Accumulate dot product and squared norms in parallel.
        words.parallelStream().forEach(word -> {
            Float x1 = weights1.get(word.getText());
            Float x2 = weights2.get(word.getText());
            if (x1 != null && x2 != null) {
                // x1 * x2
                ab.addAndGet(x1 * x2);
            }
            if (x1 != null) {
                // (x1)^2
                aa.addAndGet(x1 * x1);
            }
            if (x2 != null) {
                // (x2)^2
                bb.addAndGet(x2 * x2);
            }
        });
        double aaa = Math.sqrt(aa.doubleValue());
        double bbb = Math.sqrt(bb.doubleValue());
        // Use BigDecimal for a precise |a|*|b| product and division.
        BigDecimal aabb = BigDecimal.valueOf(aaa).multiply(BigDecimal.valueOf(bbb));
        // FIX: guard against division by zero when either vector has no weight
        // (e.g. an empty word list); the original threw ArithmeticException.
        if (aabb.signum() == 0) {
            return 0.0;
        }
        // similarity = a.b / (|a|*|b|), rounded half-up at 9 decimal places.
        return BigDecimal.valueOf(ab.get()).divide(aabb, 9, BigDecimal.ROUND_HALF_UP).doubleValue();
    }
}
public class TransactionRequestProcessor { /** * Validates if the value read is still valid and the write operation can proceed . */ private boolean isValid ( TransactionWrite write , AdvancedCache < byte [ ] , byte [ ] > readCache ) { } }
if ( write . skipRead ( ) ) { if ( isTrace ) { log . tracef ( "Operation %s wasn't read." , write ) ; } return true ; } CacheEntry < byte [ ] , byte [ ] > entry = readCache . getCacheEntry ( write . key ) ; if ( write . wasNonExisting ( ) ) { if ( isTrace ) { log . tracef ( "Key didn't exist for operation %s. Entry is %s" , write , entry ) ; } return entry == null || entry . getValue ( ) == null ; } if ( isTrace ) { log . tracef ( "Checking version for operation %s. Entry is %s" , write , entry ) ; } return entry != null && write . versionRead == MetadataUtils . extractVersion ( entry ) ;
public class CmsHelpTemplateBean {
    /**
     * Determines the mapped help page for a given workplace resource URI.<p>
     * If a mapping information is found, the requested URI is set to the found value.<p>
     * If no workplace resource URI is given, nothing is changed.<p>
     */
    protected void getMappedHelpUri() {
        try {
            // Help pages are looked up in the online project; restored in finally.
            getJsp().getRequestContext().setCurrentProject(m_onlineProject);
            if (CmsStringUtil.isNotEmpty(getParamWorkplaceresource())) {
                // found a workplace resource parameter, try to get a mapping for it
                String helpResource = null;
                String wpResource = getParamWorkplaceresource();
                int xmlPageId;
                try {
                    xmlPageId = OpenCms.getResourceManager().getResourceType(CmsResourceTypeXmlPage.getStaticTypeName()).getTypeId();
                } catch (CmsLoaderException e1) {
                    // Fall back to the static type id when the type is not registered.
                    xmlPageId = CmsResourceTypeXmlPage.getStaticTypeId();
                }
                if (getCms().existsResource(resolveMacros(getParamWorkplaceresource()), CmsResourceFilter.requireType(xmlPageId))) {
                    // given workplace resource is a page in VFS, use it as start point
                    helpResource = resolveMacros(getParamWorkplaceresource());
                    setParamHomelink(getJsp().link(helpResource));
                } else {
                    // given workplace resource does not exist, resolve mapping
                    try {
                        // try to read the mappings from the current module
                        String absolutePath = OpenCms.getSystemInfo().getAbsoluteRfsPathRelativeToWebInf(resolveMacros(RFS_HELPMAPPINGS));
                        CmsParameterConfiguration props = new CmsParameterConfiguration(absolutePath);
                        // remove context from workplace path
                        wpResource = CmsLinkManager.removeOpenCmsContext(wpResource);
                        // determine mapping for workplace resource, walking up the
                        // parent folders until a mapping is found or the root is reached
                        while ((wpResource != null) && CmsStringUtil.isEmpty(helpResource)) {
                            helpResource = props.getString(wpResource, null);
                            wpResource = CmsResource.getParentFolder(wpResource);
                        }
                    } catch (IOException e) {
                        // no mappings found in module, ignore
                    }
                    if (CmsStringUtil.isEmpty(helpResource)) {
                        // no mapping found, use default help URI
                        helpResource = DEFAULT_HELPFILE;
                    }
                    // create path to the help resource
                    helpResource = resolveMacros(PATH_HELP) + helpResource;
                    if (!getCms().existsResource(helpResource, CmsResourceFilter.IGNORE_EXPIRATION)) {
                        // mapped page missing in VFS: fall back to the default help file
                        helpResource = resolveMacros(PATH_HELP) + DEFAULT_HELPFILE;
                    }
                    // NOTE(review): the home link is always set to the default help
                    // file here, not to the mapped helpResource — confirm intended.
                    setParamHomelink(getJsp().link(resolveMacros(PATH_HELP) + DEFAULT_HELPFILE));
                }
                // set URI to found help page URI
                getJsp().getRequestContext().setUri(helpResource);
            }
        } finally {
            // Always switch back to the offline project.
            getJsp().getRequestContext().setCurrentProject(m_offlineProject);
        }
    }
}
public class OkHttpBinaryRequest { /** * Inspired from Guava com . google . common . io . ByteStreams */ protected void readBytes ( final InputStream in , final ProgressByteProcessor processor ) throws IOException { } }
final byte [ ] buf = new byte [ BUF_SIZE ] ; try { int amt ; do { amt = in . read ( buf ) ; if ( amt == - 1 ) { break ; } } while ( processor . processBytes ( buf , 0 , amt ) ) ; } finally { IOUtils . closeQuietly ( in ) ; }
public class FileUtils {
    /**
     * Shortcut implementation that determines the substring after the last
     * Windows or *nix path separator.
     *
     * @param path The path to return filename of.
     * @return The part of the path after the last slash or backslash; the whole
     *         path when it contains neither separator.
     */
    public static String fileNameOf(String path) {
        int slash = path.lastIndexOf('/');
        int backslash = path.lastIndexOf('\\');
        int separator = Math.max(slash, backslash);
        return path.substring(separator + 1);
    }
}
public class DownloadRequestQueue {
    /**
     * Checks whether a request for the given uri is currently queued and
     * returns its download state.
     *
     * @param uri the uri to check
     * @return the state of the matching request, or DownloadState.INVALID when
     *         no queued request has this uri
     */
    DownloadState query(Uri uri) {
        synchronized (currentRequests) {
            for (DownloadRequest request : currentRequests) {
                // Uris are compared by their string form.
                if (request.uri().toString().equals(uri.toString())) {
                    return request.downloadState();
                }
            }
        }
        return DownloadState.INVALID;
    }
}
public class RawResponse { /** * Write response body to file */ public void writeToFile ( Path path ) { } }
try { try ( OutputStream os = Files . newOutputStream ( path ) ) { InputStreams . transferTo ( body ( ) , os ) ; } } catch ( IOException e ) { throw new RequestsException ( e ) ; } finally { close ( ) ; }
public class Filters { /** * Constructs a day filter based on a BYDAY rule . * @ param monthDays days of the month ( values must be in range [ - 31,31 ] ) * @ return the filter */ static Predicate < DateValue > byMonthDayFilter ( final int [ ] monthDays ) { } }
return new Predicate < DateValue > ( ) { private static final long serialVersionUID = - 1618039447294490037L ; public boolean apply ( DateValue date ) { int nDays = TimeUtils . monthLength ( date . year ( ) , date . month ( ) ) ; for ( int i = monthDays . length - 1 ; i >= 0 ; i -- ) { int day = monthDays [ i ] ; if ( day < 0 ) { day += nDays + 1 ; } if ( day == date . day ( ) ) { return true ; } } return false ; } } ;
public class Tools { /** * Gets default bitext rules for a given pair of languages * @ param source Source language . * @ param target Target language . * @ param externalBitextRuleFile external file with bitext rules * @ return List of Bitext rules * @ since 2.9 */ public static List < BitextRule > getBitextRules ( Language source , Language target , File externalBitextRuleFile ) throws IOException , ParserConfigurationException , SAXException { } }
List < BitextRule > bRules = new ArrayList < > ( ) ; // try to load the bitext pattern rules for the language . . . BitextPatternRuleLoader ruleLoader = new BitextPatternRuleLoader ( ) ; String name = "/" + target . getShortCode ( ) + "/bitext.xml" ; if ( JLanguageTool . getDataBroker ( ) . ruleFileExists ( name ) ) { InputStream is = JLanguageTool . getDataBroker ( ) . getFromRulesDirAsStream ( name ) ; if ( is != null ) { bRules . addAll ( ruleLoader . getRules ( is , name ) ) ; } } if ( externalBitextRuleFile != null ) { bRules . addAll ( ruleLoader . getRules ( new FileInputStream ( externalBitextRuleFile ) , externalBitextRuleFile . getAbsolutePath ( ) ) ) ; } // load the false friend rules in the bitext mode : FalseFriendsAsBitextLoader fRuleLoader = new FalseFriendsAsBitextLoader ( ) ; String falseFriendsFile = "/false-friends.xml" ; List < BitextPatternRule > rules = fRuleLoader . getFalseFriendsAsBitext ( falseFriendsFile , source , target ) ; bRules . addAll ( rules ) ; // load Java bitext rules : bRules . addAll ( getAllBuiltinBitextRules ( source , null ) ) ; return bRules ;
public class Toothpick { /** * Opens multiple scopes in a row . * Opened scopes will be children of each other in left to right order ( e . g . { @ code * openScopes ( a , b ) } opens scopes { @ code a } and { @ code b } * and { @ code b } is a child of { @ code a } . * @ param names of the scopes to open hierarchically . * @ return the last opened scope , leaf node of the created subtree of scopes . */ public static Scope openScopes ( Object ... names ) { } }
if ( names == null ) { throw new IllegalArgumentException ( "null scope names are not allowed." ) ; } if ( names . length == 0 ) { throw new IllegalArgumentException ( "Minimally, one scope name is required." ) ; } ScopeNode lastScope = null ; ScopeNode previousScope = ( ScopeNode ) openScope ( names [ 0 ] , true ) ; for ( int i = 1 ; i < names . length ; i ++ ) { lastScope = ( ScopeNode ) openScope ( names [ i ] , false ) ; lastScope = previousScope . addChild ( lastScope ) ; previousScope = lastScope ; } return previousScope ;
public class MessageProcessorMatching {
    /**
     * Concatenates a topic space and a topic expression with a level separator
     * between them. Null or empty topics are treated as topics at the root
     * level, so the combined expression is just the topic space name.
     *
     * @param destName The topicspace name
     * @param discriminator The topic (may be null or blank)
     * @return the combined topic expression
     */
    private String buildSendTopicExpression(String destName, String discriminator) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "buildSendTopicExpression", new Object[] { destName, discriminator });
        String combo = null;
        if (discriminator == null || discriminator.trim().length() == 0)
            // Root-level topic: no separator, no discriminator.
            combo = destName;
        else
            combo = destName + MatchSpace.SUBTOPIC_SEPARATOR_CHAR + discriminator;
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "buildSendTopicExpression", combo);
        return combo;
    }
}
public class AbstractFileClient {
    /**
     * Reads an Asset from the given JSON input stream.
     *
     * @param jsonInputStream stream containing the asset JSON
     * @return the parsed Asset; its id is the file path from the root
     * @throws BadVersionException when the asset version is unsupported
     * @throws IOException on read failure
     */
    protected Asset processJSON(final InputStream jsonInputStream) throws FileNotFoundException, IOException, BadVersionException {
        // id is the file path from the root.
        Asset ass = JSONAssetConverter.readValue(jsonInputStream);
        // TODO: Should we confirm the asset is in the right location? For example a sample might be in the "blah" directory.
        return ass;
    }
}
public class ImplementedMethods {
    /**
     * Search in the already found methods' list and check if it contains
     * a method which is overriding the method parameter or is the method
     * parameter itself.
     *
     * @param method MethodDoc method to be searched in the method list for
     *        an overriding method
     * @return true when the list contains the method or an override of it
     */
    private boolean overridingMethodFound(MethodDoc method) {
        ClassDoc containingClass = method.containingClass();
        for (MethodDoc listmethod : methlist) {
            if (containingClass == listmethod.containingClass()) {
                // Identity comparison of containing classes: the original treats
                // this as "it's the same method" (the list holds one per class).
                return true;
            }
            ClassDoc cd = listmethod.overriddenClass();
            if (cd == null) {
                continue;
            }
            // listmethod overrides a method declared in containingClass
            // (or in a subclass of it), so an override is already recorded.
            if (cd == containingClass || cd.subclassOf(containingClass)) {
                return true;
            }
        }
        return false;
    }
}
public class FileStatusExtended {
    /**
     * Compare two arrays of blocks by id and generation stamp. If the file is
     * open, the sizes of the last two blocks are not compared, since they may
     * still be under construction.
     *
     * @param a1 first block array
     * @param a2 second block array
     * @param closedFile whether the file is closed (compare all block sizes)
     * @return true when the two arrays describe equivalent block lists
     */
    private boolean blocksEquals(Block[] a1, Block[] a2, boolean closedFile) {
        if (a1 == a2)
            return true;
        if (a1 == null || a2 == null || a2.length != a1.length)
            return false;
        for (int i = 0; i < a1.length; i++) {
            Block b1 = a1[i];
            Block b2 = a2[i];
            if (b1 == b2)
                continue;
            if (b1 == null || b2 == null)
                return false;
            // compare ids and gen stamps
            if (!(b1.getBlockId() == b2.getBlockId() && b1.getGenerationStamp() == b2.getGenerationStamp()))
                return false;
            // for open files check len - 2 blocks only
            if (!closedFile && i >= a1.length - 2)
                continue;
            // check block size
            if (b1.getNumBytes() != b2.getNumBytes())
                return false;
        }
        return true;
    }
}
public class IntTupleIterables { /** * Creates an iterable from the given delegate that creates iterators * that return the { @ link MutableIntTuple } s created by the iterators of * the given delegate that are contained in the given bounds . < br > * < br > * < b > Note : < / b > The caller is responsible for making sure that * the tuples returned by the given delegate have the same * { @ link Tuple # getSize ( ) } as the given bounds . This method can * not check this internally . If the sizes are different , the * behavior of the returned iterable is unspecified . * @ param min The minimum , inclusive . A copy of this tuple will be * stored internally . * @ param max The maximum , exclusive . A copy of this tuple will be * stored internally . * @ param delegate The delegate * @ return The iterable */ public static Iterable < MutableIntTuple > clampingIterable ( IntTuple min , IntTuple max , final Iterable < ? extends MutableIntTuple > delegate ) { } }
Objects . requireNonNull ( delegate , "The delegate is null" ) ; IntTuple localMin = IntTuples . copy ( min ) ; IntTuple localMax = IntTuples . copy ( max ) ; return ( ) -> IntTupleIterators . clampingIteratorInternal ( localMin , localMax , delegate . iterator ( ) ) ;
public class MemberFilterActionGroup {
    /**
     * Saves the state of the filter actions in a memento.
     *
     * @param memento the memento to which the state is saved
     */
    public void saveState(IMemento memento) {
        // Each filter flag is persisted as the string form of its boolean state.
        memento.putString(TAG_HIDEFIELDS, String.valueOf(hasMemberFilter(FILTER_FIELDS)));
        memento.putString(TAG_HIDESTATIC, String.valueOf(hasMemberFilter(FILTER_STATIC)));
        memento.putString(TAG_HIDENONPUBLIC, String.valueOf(hasMemberFilter(FILTER_NONPUBLIC)));
        memento.putString(TAG_HIDELOCALTYPES, String.valueOf(hasMemberFilter(FILTER_LOCALTYPES)));
    }
}
public class DB { /** * Updates the title for a post . * @ param postId The post to update . * @ param title The new title . * @ return Was the post modified ? * @ throws SQLException on database error or missing post id . */ public boolean updatePostTitle ( long postId , final String title ) throws SQLException { } }
Connection conn = null ; PreparedStatement stmt = null ; Timer . Context ctx = metrics . updatePostTimer . time ( ) ; try { conn = connectionSupplier . getConnection ( ) ; stmt = conn . prepareStatement ( updatePostTitleSQL ) ; stmt . setString ( 1 , title ) ; stmt . setLong ( 2 , postId ) ; return stmt . executeUpdate ( ) > 0 ; } finally { ctx . stop ( ) ; SQLUtil . closeQuietly ( conn , stmt ) ; }
public class CmsFileExplorer { /** * Sets the toolbar buttons enabled . < p > * @ param enabled the enabled flag */ private void setToolbarButtonsEnabled ( boolean enabled ) { } }
m_publishButton . setEnabled ( enabled ) ; m_newButton . setEnabled ( enabled ) ; m_uploadButton . setEnabled ( enabled ) ; if ( enabled ) { m_publishButton . setDescription ( CmsVaadinUtils . getMessageText ( Messages . GUI_PUBLISH_BUTTON_TITLE_0 ) ) ; m_newButton . setDescription ( CmsVaadinUtils . getMessageText ( Messages . GUI_NEW_RESOURCE_TITLE_0 ) ) ; m_uploadButton . setDescription ( CmsVaadinUtils . getMessageText ( Messages . GUI_UPLOAD_BUTTON_TITLE_0 ) ) ; } else { m_publishButton . setDescription ( CmsVaadinUtils . getMessageText ( Messages . GUI_TOOLBAR_NOT_AVAILABLE_ONLINE_0 ) ) ; m_newButton . setDescription ( CmsVaadinUtils . getMessageText ( Messages . GUI_TOOLBAR_NOT_AVAILABLE_ONLINE_0 ) ) ; m_uploadButton . setDescription ( CmsVaadinUtils . getMessageText ( Messages . GUI_TOOLBAR_NOT_AVAILABLE_ONLINE_0 ) ) ; }
public class AbstractSelectableChannel {
    /**
     * Adjusts this channel's blocking mode.
     *
     * <p> If the given blocking mode is different from the current blocking
     * mode then this method invokes the {@link #implConfigureBlocking
     * implConfigureBlocking} method, while holding the appropriate locks, in
     * order to change the mode. </p>
     */
    public final SelectableChannel configureBlocking(boolean block) throws IOException {
        synchronized (regLock) {
            if (!isOpen())
                throw new ClosedChannelException();
            // No-op when the requested mode is already in effect.
            if (blocking == block)
                return this;
            // A channel registered with a selector may not be made blocking.
            if (block && haveValidKeys())
                throw new IllegalBlockingModeException();
            implConfigureBlocking(block);
            // Record the new mode only after the switch succeeded.
            blocking = block;
        }
        return this;
    }
}
public class ElasticsearchReporter {
    /**
     * Rolls over to a new connection when the number of written entries has
     * reached a multiple of the configured bulk size; checked on every write
     * of a metric.
     *
     * @param connection the current connection
     * @param entriesWritten number of entries written so far
     * @return a fresh bulk connection at every bulkSize-th entry, otherwise
     *         the current connection unchanged
     * @throws IOException if opening the new connection fails
     */
    private HttpURLConnection createNewConnectionIfBulkSizeReached(HttpURLConnection connection, int entriesWritten) throws IOException {
        // NOTE(review): entriesWritten == 0 also satisfies this modulo test —
        // callers presumably invoke this only after writing; confirm.
        if (entriesWritten % bulkSize == 0) {
            closeConnection(connection);
            return openConnection("/_bulk", "POST");
        }
        return connection;
    }
}
public class SSTableExport {
    /**
     * Export an SSTable and write the resulting JSON to standard out.
     *
     * @param desc the descriptor of the sstable to read from
     * @param excludes keys to exclude from export
     * @param metadata Metadata to print keys in a proper format
     * @throws IOException on failure to read/write SSTable/standard out
     */
    public static void export(Descriptor desc, String[] excludes, CFMetaData metadata) throws IOException {
        // Convenience overload: stream directly to System.out.
        export(desc, System.out, excludes, metadata);
    }
}
public class HtmlTreeBuilder {
    /**
     * Pushes an element onto the list of active formatting elements, enforcing
     * the HTML5 "Noah's Ark" clause: if three matching elements already appear
     * since the last marker, the earliest is removed before adding the new one.
     */
    void pushActiveFormattingElements(Element in) {
        int numSeen = 0;
        // Scan backwards until a marker (null) or the start of the list.
        for (int pos = formattingElements.size() - 1; pos >= 0; pos--) {
            Element el = formattingElements.get(pos);
            if (el == null) // marker
                break;
            if (isSameFormattingElement(in, el))
                numSeen++;
            if (numSeen == 3) {
                // Third match: drop it so at most three remain after the push.
                formattingElements.remove(pos);
                break;
            }
        }
        formattingElements.add(in);
    }
}
public class EJSContainer { /** * Tell the container that the given container transaction has * completed . < p > */ public void containerASCompleted ( Object asKey ) { } }
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; // d532639.2 if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "containerASCompleted : " + asKey ) ; containerASMap . remove ( asKey ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "containerASCompleted" ) ;
public class PersistentBinaryDeque { /** * Used by test only */ int numOpenSegments ( ) { } }
int numOpen = 0 ; for ( PBDSegment segment : m_segments . values ( ) ) { if ( ! segment . isClosed ( ) ) { numOpen ++ ; } } return numOpen ;
public class AmazonRedshiftClient { /** * Enables the automatic copy of snapshots from one region to another region for a specified cluster . * @ param enableSnapshotCopyRequest * @ return Result of the EnableSnapshotCopy operation returned by the service . * @ throws IncompatibleOrderableOptionsException * The specified options are incompatible . * @ throws InvalidClusterStateException * The specified cluster is not in the < code > available < / code > state . * @ throws ClusterNotFoundException * The < code > ClusterIdentifier < / code > parameter does not refer to an existing cluster . * @ throws CopyToRegionDisabledException * Cross - region snapshot copy was temporarily disabled . Try your request again . * @ throws SnapshotCopyAlreadyEnabledException * The cluster already has cross - region snapshot copy enabled . * @ throws UnknownSnapshotCopyRegionException * The specified region is incorrect or does not exist . * @ throws UnauthorizedOperationException * Your account is not authorized to perform the requested operation . * @ throws SnapshotCopyGrantNotFoundException * The specified snapshot copy grant can ' t be found . Make sure that the name is typed correctly and that the * grant exists in the destination region . * @ throws LimitExceededException * The encryption key has exceeded its grant limit in AWS KMS . * @ throws DependentServiceRequestThrottlingException * The request cannot be completed because a dependent service is throttling requests made by Amazon * Redshift on your behalf . Wait and retry the request . * @ throws InvalidRetentionPeriodException * The retention period specified is either in the past or is not a valid value . < / p > * The value must be either - 1 or an integer between 1 and 3,653. * @ sample AmazonRedshift . EnableSnapshotCopy * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / redshift - 2012-12-01 / EnableSnapshotCopy " target = " _ top " > AWS * API Documentation < / a > */ @ Override public Cluster enableSnapshotCopy ( EnableSnapshotCopyRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeEnableSnapshotCopy ( request ) ;
public class FactoryWaveletCoiflet {
    /**
     * Creates a description of a Coiflet wavelet of order I.
     *
     * @param I order of the wavelet; only 6 is currently supported
     * @return Wavelet description.
     * @throws IllegalArgumentException for any order other than 6
     */
    public static WaveletDescription<WlCoef_F32> generate_F32(int I) {
        if (I != 6) {
            throw new IllegalArgumentException("Only 6 is currently supported");
        }
        WlCoef_F32 coef = new WlCoef_F32();
        coef.offsetScaling = -2;
        coef.offsetWavelet = -2;
        coef.scaling = new float[6];
        coef.wavelet = new float[6];
        // Closed-form COIF6 scaling coefficients, expressed via sqrt(7)/(16*sqrt(2)).
        double sqrt7 = Math.sqrt(7);
        double div = 16.0 * Math.sqrt(2);
        coef.scaling[0] = (float) ((1.0 - sqrt7) / div);
        coef.scaling[1] = (float) ((5.0 + sqrt7) / div);
        coef.scaling[2] = (float) ((14.0 + 2.0 * sqrt7) / div);
        coef.scaling[3] = (float) ((14.0 - 2.0 * sqrt7) / div);
        coef.scaling[4] = (float) ((1.0 - sqrt7) / div);
        coef.scaling[5] = (float) ((-3.0 + sqrt7) / div);
        // Wavelet coefficients: scaling coefficients reversed with alternating signs.
        coef.wavelet[0] = coef.scaling[5];
        coef.wavelet[1] = -coef.scaling[4];
        coef.wavelet[2] = coef.scaling[3];
        coef.wavelet[3] = -coef.scaling[2];
        coef.wavelet[4] = coef.scaling[1];
        coef.wavelet[5] = -coef.scaling[0];
        // Standard border handling with wrap-around indexing.
        WlBorderCoefStandard<WlCoef_F32> inverse = new WlBorderCoefStandard<>(coef);
        return new WaveletDescription<>(new BorderIndex1D_Wrap(), coef, inverse);
    }
}
public class Pubsub {
    /**
     * Acknowledge a batch of received messages.
     *
     * @param canonicalSubscriptionName The canonical (including project name) subscription to acknowledge messages on.
     * @param ackIds List of message IDs to acknowledge.
     * @return A future that is completed when this request is completed.
     */
    public PubsubFuture<Void> acknowledge(final String canonicalSubscriptionName, final List<String> ackIds) {
        // POST to "<subscription>:acknowledge" with the ack ids as the payload.
        final String path = canonicalSubscriptionName + ":acknowledge";
        final AcknowledgeRequest req = AcknowledgeRequest.builder().ackIds(ackIds).build();
        return post("acknowledge", path, req, VOID);
    }
}
public class OnOffPropertyCoder { /** * @ see PropertyCoder # decodeProperty */ Object decodeProperty ( MsgDestEncodingUtilsImpl . PropertyInputStream stream ) throws JMSException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "decodeProperty" , stream ) ; Object value = super . decodeProperty ( stream ) ; if ( SHORT_ON . equals ( value ) ) { value = ApiJmsConstants . ON ; } else { value = ApiJmsConstants . OFF ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "decodeProperty" , value ) ; return value ;
public class BitShuffle { /** * Convert the input bit - shuffled byte array into an original double array . * @ param input * @ return a double array * @ throws IOException */ public static double [ ] unshuffleDoubleArray ( byte [ ] input ) throws IOException { } }
double [ ] output = new double [ input . length / 8 ] ; int numProcessed = impl . unshuffle ( input , 0 , 8 , input . length , output , 0 ) ; assert ( numProcessed == input . length ) ; return output ;
public class WizardDialogDecorator { /** * Creates and returns a listener , which allows to show the previous fragment , when the * corresponding button is clicked . * @ return The listener , which has been created , as an instance of the type { @ link * OnClickListener } */ private OnClickListener createBackButtonListener ( ) { } }
return new OnClickListener ( ) { @ Override public void onClick ( final View v ) { int selectedIndex = viewPager . getCurrentItem ( ) ; if ( notifyOnPrevious ( selectedIndex ) ) { viewPager . setCurrentItem ( selectedIndex - 1 ) ; } } } ;
public class ChromosomeMappingTools { /** * Pretty print the details of a GeneChromosomePosition to a String * @ param chromosomePosition * @ return */ public static String formatExonStructure ( GeneChromosomePosition chromosomePosition ) { } }
if ( chromosomePosition . getOrientation ( ) == '+' ) return formatExonStructureForward ( chromosomePosition ) ; return formatExonStructureReverse ( chromosomePosition ) ;
public class DBaseFileWriter {
    /**
     * Write a record for attribute provider.
     * Writes the record's "valid" flag byte, then one value per declared column,
     * coercing each attribute of {@code element} to the column's DBF type.
     *
     * @param element is the element to write. May be {@code null}, in which case
     *        every column is written with its type's default value.
     * @throws AttributeException if an attribute cannot be set.
     * @throws IOException in case of IO error.
     * @throws MustCallWriteHeaderFunctionException if the header (columns) was not written first.
     */
    @SuppressWarnings({"checkstyle:cyclomaticcomplexity", "checkstyle:magicnumber"})
    public void writeRecord(AttributeProvider element) throws IOException, AttributeException {
        // Columns are established by the header; a record cannot be written without them.
        if (this.columns == null) {
            throw new MustCallWriteHeaderFunctionException();
        }
        // Field deleted flag (value: 2Ah (*) => Record is deleted, 20h (blank) => Record is valid)
        this.stream.writeByte(0x20);
        for (final DBaseFileField field : this.columns) {
            // Get attribute
            final DBaseFieldType dbftype = field.getType();
            final String fieldName = field.getName();
            AttributeValue attr = element != null ? element.getAttribute(fieldName) : null;
            if (attr == null) {
                // No attribute for this column: write the type's default value.
                attr = new AttributeValueImpl(dbftype.toAttributeType());
                attr.setToDefault();
            } else {
                if (attr.isAssigned()) {
                    // Copy before casting so the provider's value is not mutated.
                    attr = new AttributeValueImpl(attr);
                    attr.cast(dbftype.toAttributeType());
                } else {
                    attr = new AttributeValueImpl(attr);
                    attr.setToDefaultIfUninitialized();
                }
            }
            // Write value, dispatching on the column's DBF type.
            switch (dbftype) {
            case BOOLEAN:
                writeDBFBoolean(attr.getBoolean());
                break;
            case DATE:
                writeDBFDate(attr.getDate());
                break;
            case STRING:
                // Space-padded to the declared column length.
                writeDBFString(attr.getString(), field.getLength(), (byte) ' ');
                break;
            case INTEGER_2BYTES:
                writeDBFInteger((short) attr.getInteger());
                break;
            case INTEGER_4BYTES:
                writeDBFLong((int) attr.getInteger());
                break;
            case DOUBLE:
                writeDBFDouble(attr.getReal());
                break;
            case FLOATING_NUMBER:
            case NUMBER:
                // Numbers keep their real/integer nature; both use the column's width and precision.
                if (attr.getType() == AttributeType.REAL) {
                    writeDBFNumber(attr.getReal(), field.getLength(), field.getDecimalPointPosition());
                } else {
                    writeDBFNumber(attr.getInteger(), field.getLength(), field.getDecimalPointPosition());
                }
                break;
            case BINARY:
            case GENERAL:
            case MEMORY:
            case PICTURE:
            case VARIABLE:
                // not yet supported: emit a 10-byte zero-padded placeholder.
                writeDBFString((String) null, 10, (byte) 0);
                break;
            default:
                throw new IllegalStateException();
            }
        }
        // Be sure that the memory was not fully filled by the saving process
        if (element != null) {
            element.freeMemory();
        }
    }
}
public class ArrayHelper { /** * Get an array that contains all elements , except for the last element . * @ param < ELEMENTTYPE > * Array element type * @ param aArray * The source array . May be < code > null < / code > . * @ return < code > null < / code > if the passed array is < code > null < / code > or has * less than one element . A non - < code > null < / code > copy of the array * without the last element otherwise . */ @ Nullable @ ReturnsMutableCopy @ SafeVarargs public static < ELEMENTTYPE > ELEMENTTYPE [ ] getAllExceptLast ( @ Nullable final ELEMENTTYPE ... aArray ) { } }
return getAllExceptLast ( aArray , 1 ) ;
public class VMCommandLine { /** * Shift the command line parameters by one on the left . * The first parameter is removed from the list . * @ return the removed element or < code > null < / code > */ public static String shiftCommandLineParameters ( ) { } }
String removed = null ; if ( commandLineParameters != null ) { if ( commandLineParameters . length == 0 ) { commandLineParameters = null ; } else if ( commandLineParameters . length == 1 ) { removed = commandLineParameters [ 0 ] ; commandLineParameters = null ; } else { removed = commandLineParameters [ 0 ] ; final String [ ] newTab = new String [ commandLineParameters . length - 1 ] ; System . arraycopy ( commandLineParameters , 1 , newTab , 0 , commandLineParameters . length - 1 ) ; commandLineParameters = newTab ; } } return removed ;
public class Filtering { /** * Creates an iterator yielding elements from the source iterable matching * the given predicate . This transformation is applied lazily and the * predicate is applied while consuming returned iterator . E . g : * < code > filter ( [ 1,2,3,4 ] , isEven ) - > [ 2,4 ] < / code > * @ param < E > the iterable element type * @ param iterable the iterable where elements are fetched from * @ param predicate the predicate applied to each element * @ return an iterator containing the elements for which the predicate * evaluates to true */ public static < E > Iterator < E > filter ( Iterable < E > iterable , Predicate < E > predicate ) { } }
dbc . precondition ( iterable != null , "cannot call filter with a null iterable" ) ; return filter ( iterable . iterator ( ) , predicate ) ;
public class GatewayConfigParser {
    /**
     * Validate the parsed gateway configuration file.
     * Performs three phases: (1) converts pre-processing errors to XmlErrors,
     * (2) if none, runs XSD validation plus custom cross-element checks,
     * (3) logs every collected error and throws if any exist.
     *
     * @param configDoc the XmlObject representing the gateway-config document
     * @param preProcessErrors errors collected before parsing; any entry here skips schema validation
     * @throws GatewayConfigParserException if any validation error was collected
     */
    private void validateGatewayConfig(GatewayConfigDocument configDoc, List<String> preProcessErrors) {
        // Phase 1: seed the error list with pre-processing errors.
        List<XmlError> errorList = new ArrayList<>();
        for (String preProcessError : preProcessErrors) {
            errorList.add(XmlError.forMessage(preProcessError, XmlError.SEVERITY_ERROR));
        }
        if (errorList.isEmpty()) {
            // Phase 2: schema validation, collecting errors with line/column info.
            XmlOptions validationOptions = new XmlOptions();
            validationOptions.setLoadLineNumbers();
            validationOptions.setLoadLineNumbers(XmlOptions.LOAD_LINE_NUMBERS_END_ELEMENT);
            validationOptions.setErrorListener(errorList);
            boolean valid = configDoc.validate(validationOptions);
            if (valid) {
                // Perform custom validations that aren't expressed in the XSD
                GatewayConfigDocument.GatewayConfig config = configDoc.getGatewayConfig();
                ServiceType[] services = config.getServiceArray();
                if (services != null && services.length > 0) {
                    // Service names must be present and unique across the document.
                    List<String> serviceNames = new ArrayList<>();
                    for (ServiceType service : services) {
                        String name = service.getName();
                        if (name == null || name.length() == 0) {
                            errorList.add(XmlError.forMessage("All services must have unique non-empty names",
                                    XmlError.SEVERITY_ERROR));
                        } else if (serviceNames.indexOf(name) >= 0) {
                            errorList.add(XmlError.forMessage("Service name must be unique. More than one service named '"
                                    + name + "'", XmlError.SEVERITY_ERROR));
                        } else {
                            serviceNames.add(name);
                        }
                    }
                }
                // At most one <security>, <service-defaults> and <cluster> element each.
                SecurityType[] security = config.getSecurityArray();
                if (security != null && security.length > 1) {
                    errorList.add(XmlError.forMessage("Multiple <security> elements found; only one allowed",
                            XmlError.SEVERITY_ERROR));
                }
                ServiceDefaultsType[] serviceDefaults = config.getServiceDefaultsArray();
                if (serviceDefaults != null && serviceDefaults.length > 1) {
                    errorList.add(XmlError.forMessage("Multiple <service-defaults> elements found; only one allowed",
                            XmlError.SEVERITY_ERROR));
                }
                ClusterType[] clusterConfigs = config.getClusterArray();
                if (clusterConfigs != null && clusterConfigs.length > 1) {
                    errorList.add(XmlError.forMessage("Multiple <cluster> elements found; only one allowed",
                            XmlError.SEVERITY_ERROR));
                }
            }
        }
        // Phase 3: report all validation errors, then abort with an exception.
        if (errorList.size() > 0) {
            String validationError = "Validation errors in gateway configuration file";
            LOGGER.error(validationError);
            for (XmlError error : errorList) {
                int line = error.getLine();
                if (line != -1) {
                    int column = error.getColumn();
                    if (column == -1) {
                        LOGGER.error(" Line: " + line);
                    } else {
                        LOGGER.error(" Line: " + line + " Column: " + column);
                    }
                }
                LOGGER.error(" " + error.getMessage().replaceAll("@" + GatewayConfigNamespace.CURRENT_NS, ""));
                // NOTE(review): the first contains("notify-options") test is redundant —
                // any message containing "notify-options" also contains "notify".
                if (error.getMessage().contains("notify-options") || error.getMessage().contains("notify")) {
                    validationError = "Could not start because of references to APNs in the configuration."
                            + " APNs is not supported in this version of the gateway, but will be added in a future release.";
                    LOGGER.error(validationError);
                }
                if (error.getMessage().contains("DataRateString")) {
                    // Crude, but customers keep tripping over cases like 100KB/s being invalid.
                    // Example output:
                    //   ERROR - Validation errors in gateway configuration file
                    //   ERROR -   Line: 12 Column: 36
                    //   ERROR -   string value '1m' does not match pattern for DataRateString ...
                    LOGGER.error(" " + "(permitted data rate units are B/s, kB/s, KiB/s, kB/s, MB/s, and MiB/s)");
                }
                if (error.getCursorLocation() != null) {
                    LOGGER.error(" " + error.getCursorLocation().xmlText());
                }
            }
            // validationError may have been upgraded to the APNs-specific message above.
            throw new GatewayConfigParserException(validationError);
        }
    }
}
public class InputStreamResource { /** * This implementation throws IllegalStateException if attempting to * read the underlying stream multiple times . */ @ Override public InputStream getInputStream ( ) throws IOException , IllegalStateException { } }
if ( this . read ) { throw new IllegalStateException ( "InputStream has already been read - " + "do not use InputStreamResource if a stream needs to be read multiple times" ) ; } this . read = true ; return this . inputStream ;
public class Params { /** * Test if numeric parameter is strictly greater than given threshold value . * @ param parameter invocation numeric parameter , * @ param value threshold value , * @ param name the name of invocation parameter . * @ throws IllegalArgumentException if < code > parameter < / code > is not greater than threshold value . */ public static void GT ( long parameter , long value , String name ) throws IllegalArgumentException { } }
if ( parameter <= value ) { throw new IllegalArgumentException ( String . format ( "%s is not greater than %d." , name , value ) ) ; }
public class RateOfTurn { /** * Return ' s motion after timespan * @ param motion Motion at the beginning * @ param span TimeSpan of turning * @ return */ public Motion getMotionAfter ( Motion motion , TimeSpan span ) { } }
return new Motion ( motion . getSpeed ( ) , getBearingAfter ( motion . getAngle ( ) , span ) ) ;
public class BookKeeperServiceRunner { /** * Suspends ( stops ) ZooKeeper , without destroying its underlying data ( so a subsequent resume can pick up from the * state where it left off ) . */ public void suspendZooKeeper ( ) { } }
val zk = this . zkServer . get ( ) ; Preconditions . checkState ( zk != null , "ZooKeeper not started." ) ; // Stop , but do not close , the ZK runner . zk . stop ( ) ; log . info ( "ZooKeeper suspended." ) ;
public class PseudoClassSpecifierChecker { /** * Add { @ code : empty } elements . * @ see < a href = " http : / / www . w3 . org / TR / css3 - selectors / # empty - pseudo " > < code > : empty < / code > pseudo - class < / a > */ private void addEmptyElements ( ) { } }
for ( Node node : nodes ) { boolean empty = true ; if ( node instanceof NestableNode ) { List < Node > nl = ( ( NestableNode ) node ) . getChildren ( ) ; for ( Node n : nl ) { if ( n instanceof Element ) { empty = false ; break ; } else if ( n instanceof Text ) { // TODO : Should we trim the text and see if it ' s length 0? String value = ( ( Text ) n ) . getContent ( ) ; if ( value . length ( ) > 0 ) { empty = false ; break ; } } } } if ( empty ) { result . add ( node ) ; } }
public class ComponentFactory { /** * Factory method for create a new { @ link WebMarkupContainer } . * @ param < T > * the generic type of the model * @ param id * the id * @ param model * the model * @ return the new { @ link WebMarkupContainer } . */ public static < T > WebMarkupContainer newWebMarkupContainer ( final String id , final IModel < T > model ) { } }
final WebMarkupContainer webMarkupContainer = new WebMarkupContainer ( id , model ) ; webMarkupContainer . setOutputMarkupId ( true ) ; return webMarkupContainer ;
public class GetIntegrationResult { /** * Specifies the integration ' s cache key parameters . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCacheKeyParameters ( java . util . Collection ) } or { @ link # withCacheKeyParameters ( java . util . Collection ) } if * you want to override the existing values . * @ param cacheKeyParameters * Specifies the integration ' s cache key parameters . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetIntegrationResult withCacheKeyParameters ( String ... cacheKeyParameters ) { } }
if ( this . cacheKeyParameters == null ) { setCacheKeyParameters ( new java . util . ArrayList < String > ( cacheKeyParameters . length ) ) ; } for ( String ele : cacheKeyParameters ) { this . cacheKeyParameters . add ( ele ) ; } return this ;
public class DRL6Lexer {
    /**
     * $ANTLR start "WS"
     * Generated lexer rule: matches one or more whitespace characters
     * (space, tab, form feed, or EOL) and routes the token to the hidden channel.
     */
    public final void mWS() throws RecognitionException {
        try {
            int _type = WS;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // ( ' ' | '\t' | '\f' | EOL )+
            {
                int cnt1 = 0;
                loop1:
                while (true) {
                    // Decide which alternative matches the lookahead character.
                    int alt1 = 5;
                    switch (input.LA(1)) {
                        case ' ': {
                            alt1 = 1;
                        }
                        break;
                        case '\t': {
                            alt1 = 2;
                        }
                        break;
                        case '\f': {
                            alt1 = 3;
                        }
                        break;
                        case '\n':
                        case '\r': {
                            alt1 = 4;
                        }
                        break;
                    }
                    switch (alt1) {
                        case 1: {
                            match(' ');
                            if (state.failed) return;
                        }
                        break;
                        case 2: {
                            match('\t');
                            if (state.failed) return;
                        }
                        break;
                        case 3: {
                            match('\f');
                            if (state.failed) return;
                        }
                        break;
                        case 4: {
                            // Delegate end-of-line handling to the EOL rule.
                            mEOL();
                            if (state.failed) return;
                        }
                        break;
                        default:
                            // No alternative matched: accept if at least one char was consumed.
                            if (cnt1 >= 1) break loop1;
                            if (state.backtracking > 0) {
                                state.failed = true;
                                return;
                            }
                            EarlyExitException eee = new EarlyExitException(1, input);
                            throw eee;
                    }
                    cnt1++;
                }
                // Whitespace tokens are hidden from the parser.
                if (state.backtracking == 0) {
                    _channel = HIDDEN;
                }
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class Pattern { /** * Matches this pattern against the input . * @ param input path to match with this pattern * @ return result from the matching < code > pattern < / code > against < code > input < / code > * @ throws IllegalArgumentException if < code > input < / code > is not normalized */ public MatchResult match ( QPath input ) { } }
try { return match ( new Context ( input ) ) . getMatchResult ( ) ; } catch ( RepositoryException e ) { throw ( IllegalArgumentException ) new IllegalArgumentException ( "QPath not normalized" ) . initCause ( e ) ; }
public class Roster { /** * Removes all the groups with no entries . * This is used by { @ link RosterPushListener } and { @ link RosterResultListener } to * cleanup groups after removing contacts . */ private void removeEmptyGroups ( ) { } }
// We have to do this because RosterGroup . removeEntry removes the entry immediately // ( locally ) and the group could remain empty . // TODO Check the performance / logic for rosters with large number of groups for ( RosterGroup group : getGroups ( ) ) { if ( group . getEntryCount ( ) == 0 ) { groups . remove ( group . getName ( ) ) ; } }
public class NodeWriteTrx { /** * { @ inheritDoc } */ @ Override public void setValue ( final String pValue ) throws TTException { } }
checkState ( ! mDelegate . isClosed ( ) , "Transaction is already closed." ) ; checkState ( mDelegate . getCurrentNode ( ) instanceof ITreeValData , "setValue is not allowed if current node is not an ITreeValData implementation, but was %s" , mDelegate . getCurrentNode ( ) ) ; final long oldHash = mDelegate . getCurrentNode ( ) . hashCode ( ) ; final ITreeValData node = ( ITreeValData ) getPtx ( ) . getData ( mDelegate . getCurrentNode ( ) . getDataKey ( ) ) ; node . setValue ( TypedValue . getBytes ( pValue ) ) ; getPtx ( ) . setData ( node ) ; mDelegate . setCurrentNode ( ( ITreeData ) node ) ; adaptHashedWithUpdate ( oldHash ) ;
public class BaseRequest { /** * Start the installation . */ final void install ( ) { } }
Intent intent = new Intent ( Intent . ACTION_INSTALL_PACKAGE ) ; intent . setFlags ( Intent . FLAG_ACTIVITY_NEW_TASK ) ; intent . addFlags ( Intent . FLAG_GRANT_READ_URI_PERMISSION ) ; Uri uri = AndPermission . getFileUri ( mSource . getContext ( ) , mFile ) ; intent . setDataAndType ( uri , "application/vnd.android.package-archive" ) ; mSource . startActivity ( intent ) ;
public class JmsJMSContextImpl { /** * ( non - Javadoc ) * @ see javax . jms . JMSContext # createMapMessage ( ) */ @ Override public MapMessage createMapMessage ( ) throws JMSRuntimeException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "createMapMessage" ) ; MapMessage mapMessage = null ; try { mapMessage = jmsSession . createMapMessage ( ) ; } catch ( JMSException jmse ) { throw ( JMSRuntimeException ) JmsErrorUtils . getJMS2Exception ( jmse , JMSRuntimeException . class ) ; } finally { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "createMapMessage" , new Object [ ] { mapMessage } ) ; } return mapMessage ;
public class PassThruTable { /** * Close the all the recordsets in the list . */ public void close ( ) { } }
if ( m_mapTable != null ) { Iterator < BaseTable > iterator = this . getTables ( ) ; while ( iterator . hasNext ( ) ) { BaseTable table = iterator . next ( ) ; if ( ( table != null ) && ( table != this . getNextTable ( ) ) ) { Record record = table . getRecord ( ) ; if ( record != null ) record . close ( ) ; } } } if ( m_tableNext != null ) m_tableNext . close ( ) ; super . close ( ) ;
public class HttpOutboundServiceContextImpl { /** * @ see * com . ibm . ws . http . channel . internal . HttpServiceContextImpl # createChunkHeader * ( byte [ ] ) */ @ Override protected WsByteBuffer createChunkHeader ( byte [ ] length ) { } }
if ( ! getLink ( ) . isReconnectAllowed ( ) ) { // use the " shared " chunk header object return super . createChunkHeader ( length ) ; } // must make a unique buffer to avoid data corruption during a // reconnect pass if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "OSC creating chunk length buffer" ) ; } WsByteBuffer header = allocateBuffer ( 32 ) ; header . put ( length ) ; header . put ( BNFHeaders . EOL ) ; header . flip ( ) ; return header ;
public class ExecutionVertex {
    /**
     * Cancels and removes the task represented by this vertex from the instance
     * it is currently running on. If the task is not currently running, its
     * execution state is simply updated to <code>CANCELLED</code>.
     * Implemented as a compare-and-set retry loop over the atomic execution state.
     *
     * @return the result of the task cancel attempt
     */
    public TaskCancelResult cancelTask() {
        while (true) {
            final ExecutionState previousState = this.executionState.get();
            // Terminal states: nothing left to cancel, report success.
            if (previousState == ExecutionState.CANCELED) {
                return new TaskCancelResult(getID(), AbstractTaskResult.ReturnCode.SUCCESS);
            }
            if (previousState == ExecutionState.FAILED) {
                return new TaskCancelResult(getID(), AbstractTaskResult.ReturnCode.SUCCESS);
            }
            if (previousState == ExecutionState.FINISHED) {
                return new TaskCancelResult(getID(), AbstractTaskResult.ReturnCode.SUCCESS);
            }
            // The vertex has already received a cancel request
            if (previousState == ExecutionState.CANCELING) {
                return new TaskCancelResult(getID(), ReturnCode.SUCCESS);
            }
            // Do not trigger the cancel request when vertex is in state STARTING,
            // this might cause a race between RPC calls.
            if (previousState == ExecutionState.STARTING) {
                this.cancelRequested.set(true);
                // We had a race, so we unset the flag and take care of cancellation ourselves
                if (this.executionState.get() != ExecutionState.STARTING) {
                    this.cancelRequested.set(false);
                    continue;
                }
                return new TaskCancelResult(getID(), AbstractTaskResult.ReturnCode.SUCCESS);
            }
            // Check if we had a race. If state change is accepted, send cancel request
            if (compareAndUpdateExecutionState(previousState, ExecutionState.CANCELING)) {
                // Vertices of a stage other than the current one never actually run: cancel locally.
                if (this.groupVertex.getStageNumber() != this.executionGraph.getIndexOfCurrentExecutionStage()) {
                    // Set to canceled directly
                    updateExecutionState(ExecutionState.CANCELED, null);
                    return new TaskCancelResult(getID(), AbstractTaskResult.ReturnCode.SUCCESS);
                }
                // Only RUNNING/FINISHING tasks need a remote cancel; others cancel locally.
                if (previousState != ExecutionState.RUNNING && previousState != ExecutionState.FINISHING) {
                    // Set to canceled directly
                    updateExecutionState(ExecutionState.CANCELED, null);
                    return new TaskCancelResult(getID(), AbstractTaskResult.ReturnCode.SUCCESS);
                }
                final AllocatedResource ar = this.allocatedResource.get();
                if (ar == null) {
                    final TaskCancelResult result =
                            new TaskCancelResult(getID(), AbstractTaskResult.ReturnCode.NO_INSTANCE);
                    result.setDescription("Assigned instance of vertex " + this.toString() + " is null!");
                    return result;
                }
                try {
                    // Forward the cancel request to the instance actually running the task.
                    return ar.getInstance().cancelTask(this.vertexID);
                } catch (IOException e) {
                    final TaskCancelResult result =
                            new TaskCancelResult(getID(), AbstractTaskResult.ReturnCode.IPC_ERROR);
                    result.setDescription(StringUtils.stringifyException(e));
                    return result;
                }
            }
            // CAS failed: the state changed concurrently, retry from the top.
        }
    }
}
public class ApiClientTransportFactory { /** * < p > registerTransport . < / p > * @ param transportClazz a { @ link java . lang . String } object . */ @ SuppressWarnings ( "unchecked" ) public void registerTransport ( String transportClazz ) { } }
if ( transportClazz == null ) { return ; } try { registerTransport ( ( Class < Transport > ) Class . forName ( transportClazz ) ) ; } catch ( ClassNotFoundException e ) { return ; } catch ( ClassCastException cce ) { throw new ApiTransportException ( cce ) ; }
public class MasterElectionListener { /** * 是否和当前节点是相同的GROUP * @ param node * @ return */ private boolean isSameGroup ( Node node ) { } }
return node . getNodeType ( ) . equals ( appContext . getConfig ( ) . getNodeType ( ) ) && node . getGroup ( ) . equals ( appContext . getConfig ( ) . getNodeGroup ( ) ) ;
public class FilesImpl {
    /**
     * Returns the content of the specified compute node file.
     * This overload sends the request with no optional request options;
     * all optional headers are left null.
     *
     * @param poolId The ID of the pool that contains the compute node.
     * @param nodeId The ID of the compute node that contains the file.
     * @param filePath The path to the compute node file that you want to get the content of.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the InputStream object
     */
    public Observable<ServiceResponseWithHeaders<InputStream, FileGetFromComputeNodeHeaders>> getFromComputeNodeWithServiceResponseAsync(String poolId, String nodeId, String filePath) {
        // Required-parameter validation.
        if (this.client.batchUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.batchUrl() is required and cannot be null.");
        }
        if (poolId == null) {
            throw new IllegalArgumentException("Parameter poolId is required and cannot be null.");
        }
        if (nodeId == null) {
            throw new IllegalArgumentException("Parameter nodeId is required and cannot be null.");
        }
        if (filePath == null) {
            throw new IllegalArgumentException("Parameter filePath is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // No options supplied by this overload; the nulls below keep the generated-code shape.
        final FileGetFromComputeNodeOptions fileGetFromComputeNodeOptions = null;
        Integer timeout = null;
        UUID clientRequestId = null;
        Boolean returnClientRequestId = null;
        DateTime ocpDate = null;
        String ocpRange = null;
        DateTime ifModifiedSince = null;
        DateTime ifUnmodifiedSince = null;
        // Substitute the account endpoint into the parameterized host template.
        String parameterizedHost = Joiner.on(", ").join("{batchUrl}", this.client.batchUrl());
        // Convert DateTime headers to RFC 1123 form (all null here, kept for symmetry).
        DateTimeRfc1123 ocpDateConverted = null;
        if (ocpDate != null) {
            ocpDateConverted = new DateTimeRfc1123(ocpDate);
        }
        DateTimeRfc1123 ifModifiedSinceConverted = null;
        if (ifModifiedSince != null) {
            ifModifiedSinceConverted = new DateTimeRfc1123(ifModifiedSince);
        }
        DateTimeRfc1123 ifUnmodifiedSinceConverted = null;
        if (ifUnmodifiedSince != null) {
            ifUnmodifiedSinceConverted = new DateTimeRfc1123(ifUnmodifiedSince);
        }
        // Issue the request and map the raw HTTP response to the typed service response.
        return service.getFromComputeNode(poolId, nodeId, filePath, this.client.apiVersion(), this.client.acceptLanguage(), timeout, clientRequestId, returnClientRequestId, ocpDateConverted, ocpRange, ifModifiedSinceConverted, ifUnmodifiedSinceConverted, parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<InputStream, FileGetFromComputeNodeHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<InputStream, FileGetFromComputeNodeHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<InputStream, FileGetFromComputeNodeHeaders> clientResponse = getFromComputeNodeDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization/service errors become an error-emitting observable.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class KafkaHelper { /** * Create a producer that writes String keys and values * @ param overrideConfig Producer config to override * @ return KafkaProducer */ public KafkaProducer < String , String > createStringProducer ( Properties overrideConfig ) { } }
return createProducer ( new StringSerializer ( ) , new StringSerializer ( ) , overrideConfig ) ;
public class DocumentConverter { /** * { @ inheritDoc } */ @ Override public XBELDocument convert ( Document source ) { } }
if ( source == null ) return null ; XBELDocument xd = new XBELDocument ( ) ; List < StatementGroup > stmtGroups = source . getStatementGroups ( ) ; List < XBELStatementGroup > xstmtGroups = xd . getStatementGroup ( ) ; StatementGroupConverter sgConverter = new StatementGroupConverter ( ) ; for ( final StatementGroup sg : stmtGroups ) { // Defer to StatementGroupConverter xstmtGroups . add ( sgConverter . convert ( sg ) ) ; } List < AnnotationDefinition > definitions = source . getDefinitions ( ) ; if ( hasItems ( definitions ) ) { XBELAnnotationDefinitionGroup xadGroup = new XBELAnnotationDefinitionGroup ( ) ; List < XBELInternalAnnotationDefinition > internals = xadGroup . getInternalAnnotationDefinition ( ) ; List < XBELExternalAnnotationDefinition > externals = xadGroup . getExternalAnnotationDefinition ( ) ; InternalAnnotationDefinitionConverter iConverter = new InternalAnnotationDefinitionConverter ( ) ; ExternalAnnotationDefinitionConverter eConverter = new ExternalAnnotationDefinitionConverter ( ) ; for ( final AnnotationDefinition ad : definitions ) { XBELInternalAnnotationDefinition iad = iConverter . convert ( ad ) ; if ( iad != null ) { internals . add ( iad ) ; continue ; } XBELExternalAnnotationDefinition ead = eConverter . convert ( ad ) ; if ( ead != null ) { externals . add ( ead ) ; } } xd . setAnnotationDefinitionGroup ( xadGroup ) ; } Header header = source . getHeader ( ) ; HeaderConverter hConverter = new HeaderConverter ( ) ; // Defer to HeaderConverter xd . setHeader ( hConverter . convert ( header ) ) ; NamespaceGroup nsGroup = source . getNamespaceGroup ( ) ; if ( nsGroup != null ) { NamespaceGroupConverter ngConverter = new NamespaceGroupConverter ( ) ; // Defer to NamespaceGroupConverter xd . setNamespaceGroup ( ngConverter . convert ( nsGroup ) ) ; } return xd ;
public class Duration { /** * / * [ deutsch ] * < p > Extrahiert eine neue Dauer , die nur alle kalendarischen Zeiteinheiten * dieser Dauer enth & auml ; lt . < / p > * < p > Der Uhrzeitanteil wird entfernt . < / p > * @ return new duration with calendar units only * @ since 3.0 * @ see # compose ( Duration , Duration ) * @ see # toClockPeriod ( ) */ public Duration < CalendarUnit > toCalendarPeriod ( ) { } }
if ( this . isEmpty ( ) ) { return Duration . ofZero ( ) ; } List < Item < CalendarUnit > > calItems = new ArrayList < > ( ) ; for ( Item < U > item : this . items ) { if ( item . getUnit ( ) instanceof CalendarUnit ) { calItems . add ( Item . of ( item . getAmount ( ) , CalendarUnit . class . cast ( item . getUnit ( ) ) ) ) ; } } if ( calItems . isEmpty ( ) ) { return Duration . ofZero ( ) ; } return new Duration < > ( calItems , this . isNegative ( ) ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcHumidifierType ( ) { } }
if ( ifcHumidifierTypeEClass == null ) { ifcHumidifierTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 315 ) ; } return ifcHumidifierTypeEClass ;
public class InfoVisitor {
    /**
     * [visitCode (visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber)
     * visitMaxs] visitEnd.
     * Records a MethodInfo for every visited method, separating constructors
     * from ordinary methods and skipping static initializers.
     */
    @Override
    public MethodVisitor visitMethod(int access, String name, String desc, String signature, String exceptions[]) {
        // skip static initializers
        if (name.equals("<clinit>")) {
            return null;
        }
        final MethodInfoImpl methodInfo = new MethodInfoImpl(name, desc, exceptions, access, classInfo);
        // Constructors and ordinary methods are tracked in separate lists.
        if (name.equals("<init>")) {
            constructorInfos.add(methodInfo);
        } else {
            methodInfos.add(methodInfo);
        }
        if (logParms != null) {
            logParms[1] = name;
            logParms[2] = methodInfo.getHashText();
        }
        methodVisitor.setMethodInfo(methodInfo);
        return methodVisitor;
    }
}
public class DogmaApi { /** * Get effects Get a list of dogma effect ids - - - This route expires daily * at 11:05 * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ return List & lt ; Integer & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public List < Integer > getDogmaEffects ( String datasource , String ifNoneMatch ) throws ApiException { } }
ApiResponse < List < Integer > > resp = getDogmaEffectsWithHttpInfo ( datasource , ifNoneMatch ) ; return resp . getData ( ) ;
public class AstUtils { /** * Determine if an { @ link AnnotatedNode } has one or more of the specified annotations . * N . B . the annotation type names are not normally fully qualified . * @ param node the node to examine * @ param annotations the annotations to look for * @ return { @ code true } if at least one of the annotations is found , otherwise * { @ code false } */ public static boolean hasAtLeastOneAnnotation ( AnnotatedNode node , String ... annotations ) { } }
for ( AnnotationNode annotationNode : node . getAnnotations ( ) ) { for ( String annotation : annotations ) { if ( PatternMatchUtils . simpleMatch ( annotation , annotationNode . getClassNode ( ) . getName ( ) ) ) { return true ; } } } return false ;
public class AjaxAddableTabbedPanel { /** * Factory method for creating the new label of the button . * @ param id * the id * @ param model * the model * @ return the new label of the button . */ protected Label newAddTabButtonLabel ( final String id , final IModel < String > model ) { } }
return ComponentFactory . newLabel ( id , model ) ;
public class ParametricQuery { /** * Executes the SELECT statement , passing the result set to the ResultSetWorker for processing . * The ResultSetWorker must close the result set before returning . */ public < T > T executeSelect ( Connection conn , DataObject object , ResultSetWorker < T > worker ) throws Exception { } }
PreparedStatement statement = conn . prepareStatement ( _sql ) ; try { load ( statement , object ) ; return worker . process ( statement . executeQuery ( ) ) ; } finally { statement . close ( ) ; }
public class ClassHelper { /** * Builds a new instance for the class represented by the given class descriptor . * @ param cld The class descriptor * @ return The instance */ public static Object buildNewObjectInstance ( ClassDescriptor cld ) { } }
Object result = null ; // If either the factory class and / or factory method is null , // just follow the normal code path and create via constructor if ( ( cld . getFactoryClass ( ) == null ) || ( cld . getFactoryMethod ( ) == null ) ) { try { // 1 . create an empty Object ( persistent classes need a public default constructor ) Constructor con = cld . getZeroArgumentConstructor ( ) ; if ( con == null ) { throw new ClassNotPersistenceCapableException ( "A zero argument constructor was not provided! Class was '" + cld . getClassNameOfObject ( ) + "'" ) ; } result = ConstructorHelper . instantiate ( con ) ; } catch ( InstantiationException e ) { throw new ClassNotPersistenceCapableException ( "Can't instantiate class '" + cld . getClassNameOfObject ( ) + "'" ) ; } } else { try { // 1 . create an empty Object by calling the no - parms factory method Method method = cld . getFactoryMethod ( ) ; if ( Modifier . isStatic ( method . getModifiers ( ) ) ) { // method is static so call it directly result = method . invoke ( null , null ) ; } else { // method is not static , so create an object of the factory first // note that this requires a public no - parameter ( default ) constructor Object factoryInstance = cld . getFactoryClass ( ) . newInstance ( ) ; result = method . invoke ( factoryInstance , null ) ; } } catch ( Exception ex ) { throw new PersistenceBrokerException ( "Unable to build object instance of class '" + cld . getClassNameOfObject ( ) + "' from factory:" + cld . getFactoryClass ( ) + "." + cld . getFactoryMethod ( ) , ex ) ; } } return result ;
public class WFG9 { /** * WFG9 t1 transformation */ public float [ ] t1 ( float [ ] z , int k ) { } }
float [ ] result = new float [ z . length ] ; float [ ] w = new float [ z . length ] ; for ( int i = 0 ; i < w . length ; i ++ ) { w [ i ] = ( float ) 1.0 ; } for ( int i = 0 ; i < z . length - 1 ; i ++ ) { int head = i + 1 ; int tail = z . length - 1 ; float [ ] subZ = subVector ( z , head , tail ) ; float [ ] subW = subVector ( w , head , tail ) ; float aux = ( new Transformations ( ) ) . rSum ( subZ , subW ) ; result [ i ] = ( new Transformations ( ) ) . bParam ( z [ i ] , aux , ( float ) 0.98 / ( float ) 49.98 , ( float ) 0.02 , ( float ) 50 ) ; } result [ z . length - 1 ] = z [ z . length - 1 ] ; return result ;
public class Serializer { /** * Registers a type serializer with an identifier . * The provided serializable type ID will be used to identify the serializable type during serialization and deserialization . * When objects of the given { @ code type } are serialized to a { @ link Buffer } , the given type * { @ code id } will be written to the buffer in lieu of its class name . When the object is deserialized , the type { @ code id } * will be used to look up the class . It is essential that the given { @ code type } be registered with the same { @ code id } * on all { @ link Serializer } instances . * Because a custom { @ link TypeSerializer } is provided , the registered { @ code type } can be any class and does not have to * implement any particular interface . * Internally , the provided class will be wrapped in a { @ link DefaultTypeSerializerFactory } . The serializer * class can be registered for more than one { @ code type } class . The factory will instantiate a new * { @ link TypeSerializer } instance once for each type for which the serializer is registered per { @ link Serializer } * instance . If the { @ code Serializer } instance is { @ link Serializer # clone ( ) cloned } , the serializer * factory will be copied and a new { @ link TypeSerializer } will be instantiated for the clone . * @ param type The serializable type . * @ param id The serializable type ID . * @ param serializer The serializer to register . * @ return The serializer instance . */ public Serializer register ( Class < ? > type , int id , Class < ? extends TypeSerializer > serializer ) { } }
registry . register ( type , id , serializer ) ; return this ;
public class Widget {
    /**
     * Enable clipping for the Widget. Widget content, including its children,
     * will be clipped by a rectangular view port. By default clipping is
     * disabled; calling this more than once logs a warning and does nothing.
     */
    public void enableClipRegion() {
        // idempotence guard: clipping can only be enabled once
        if (mClippingEnabled) {
            Log.w(TAG, "Clipping has been enabled already for %s!", getName());
            return;
        }
        Log.d(Log.SUBSYSTEM.WIDGET, TAG, "enableClipping for %s [%f, %f, %f]",
                getName(), getViewPortWidth(), getViewPortHeight(), getViewPortDepth());
        mClippingEnabled = true;
        // the clipping quad writes to the stencil buffer; the color/texture is
        // never shown (STENCIL rendering order), it only defines the clip region
        GVRTexture texture = WidgetLib.getTextureHelper().getSolidColorTexture(Color.YELLOW);
        GVRSceneObject clippingObj = new GVRSceneObject(mContext, getViewPortWidth(), getViewPortHeight(), texture);
        clippingObj.setName("clippingObj");
        // GL_ALWAYS + GL_REPLACE: every fragment of the quad stamps 1 into the stencil
        clippingObj.getRenderData()
                .setRenderingOrder(GVRRenderData.GVRRenderingOrder.STENCIL)
                .setStencilTest(true)
                .setStencilFunc(GLES30.GL_ALWAYS, 1, 0xFF)
                .setStencilOp(GLES30.GL_KEEP, GLES30.GL_KEEP, GLES30.GL_REPLACE)
                .setStencilMask(0xFF);
        mSceneObject.addChildObject(clippingObj);
        // propagate the clip region to all existing children
        for (Widget child : getChildren()) {
            setObjectClipped(child);
        }
    }
}
public class ThreadSafety {
    /**
     * Checks that any thread-safe type parameters are instantiated with
     * thread-safe types.
     *
     * Pairs each type parameter with its corresponding type argument and, for
     * parameters annotated as requiring thread safety, verifies the supplied
     * argument type. Returns the first violation found, or
     * {@code Violation.absent()} if every instantiation is acceptable.
     */
    public Violation checkInstantiation(Collection<TypeVariableSymbol> typeParameters, Collection<Type> typeArguments) {
        // zip walks the two collections in lockstep; excess elements of the
        // longer collection (if any) are ignored by Streams.zip
        return Streams.zip(typeParameters.stream(), typeArguments.stream(), (sym, type) -> {
            // unannotated parameters place no constraint on their argument
            if (!hasThreadSafeTypeParameterAnnotation(sym)) {
                return Violation.absent();
            }
            Violation info = isThreadSafeType(/* allowContainerTypeParameters= */ true,
                    /* containerTypeParameters= */ ImmutableSet.of(), type);
            if (!info.isPresent()) {
                return Violation.absent();
            }
            // augment the underlying violation with which parameter was mis-instantiated
            return info.plus(String.format("instantiation of '%s' is %s", sym, purpose.mutableOrNotThreadSafe()));
        }).filter(Violation::isPresent).findFirst().orElse(Violation.absent());
    }
}
public class AiMesh { /** * Returns the x - coordinate of a vertex position . * @ param vertex the vertex index * @ return the x coordinate */ public float getPositionX ( int vertex ) { } }
if ( ! hasPositions ( ) ) { throw new IllegalStateException ( "mesh has no positions" ) ; } checkVertexIndexBounds ( vertex ) ; return m_vertices . getFloat ( vertex * 3 * SIZEOF_FLOAT ) ;
public class RequestUtils { /** * Convenience method to get parameters in case < code > multipart / form - data < / code > request was used . * Returns a map where keys are names of all parameters , while values are the first value for each parameter , even * if such parameter has more than one value submitted . * @ param formItems form items retrieved from < code > multipart / form - data < / code > request . * @ return a map where keys are names of all parameters , while values are first value for each parameter , even * if such parameter has more than one value submitted . */ public static Map < String , String > params1st ( List < FormItem > formItems ) { } }
Map < String , String > vals = new HashMap < > ( ) ; for ( FormItem formItem : formItems ) { if ( formItem . isFormField ( ) && ! vals . containsKey ( formItem . getFieldName ( ) ) ) { vals . put ( formItem . getFieldName ( ) , formItem . getStreamAsString ( ) ) ; } } return vals ;
public class JQMPage { /** * Content band will be centered between Header and Footer * ( they must be defined as fixed = " true " OR page . pseudoFixedToolbars = " true " ) . */ public void setContentCentered ( boolean contentCentered ) { } }
boolean oldVal = this . contentCentered ; this . contentCentered = contentCentered ; if ( oldVal != this . contentCentered && content != null && content . isAttached ( ) ) { if ( this . contentCentered ) { centerContent ( ) ; initWindowResize ( ) ; } else { clearCenterContent ( ) ; } }
public class hqlLexer {
    /**
     * $ANTLR start "MEMBER"
     *
     * Generated lexer rule: matches the literal 'member' and emits a MEMBER
     * token on the default channel. Do not edit by hand.
     */
    public final void mMEMBER() throws RecognitionException {
        try {
            int _type = MEMBER;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // hql.g:45:8: ( 'member' )
            // hql.g:45:10: 'member'
            {
                match("member");
                // in backtracking mode a failed match just returns without emitting
                if (state.failed) return;
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class Say { /** * - - - - - DEBUG - - - - - */ public static void debug ( Object message ) { } }
log ( Level . DEBUG , message , null , ( Object [ ] ) null ) ;
public class ChatController { /** * Gets profile id from Foundation for the active user . * @ return Active user profile id . */ String getProfileId ( ) { } }
final RxComapiClient client = clientReference . get ( ) ; return client != null ? client . getSession ( ) . getProfileId ( ) : null ;
public class PersistentEntityStoreImpl { /** * Gets or creates id of the entity type . * @ param entityType entity type name . * @ param allowCreate if set to true and if there is no entity type like entityType , * create the new id for the entityType . * @ return entity type id . */ @ Deprecated public int getEntityTypeId ( @ NotNull final String entityType , final boolean allowCreate ) { } }
return getEntityTypeId ( txnProvider , entityType , allowCreate ) ;
public class ColumnNameHelper {
    /**
     * Merge two lists of min cell name components, keeping the smaller value
     * at each position according to the per-position (sub)comparator.
     *
     * @param minColumnNames lhs
     * @param candidates rhs
     * @param comparator comparator providing the subtype comparator per position
     * @return a list with the smallest column names according to the (sub)comparator
     */
    public static List<ByteBuffer> mergeMin(List<ByteBuffer> minColumnNames, List<ByteBuffer> candidates, CellNameType comparator) {
        // trivial cases: one side empty — take the other (minimized where needed)
        if (minColumnNames.isEmpty())
            return minimalBuffersFor(candidates);
        if (candidates.isEmpty())
            return minColumnNames;
        // work over the longer list's length, seeding the result from the shorter one
        List<ByteBuffer> biggest = minColumnNames.size() > candidates.size() ? minColumnNames : candidates;
        List<ByteBuffer> smallest = minColumnNames.size() > candidates.size() ? candidates : minColumnNames;
        // We want to always copy the smallest list, and maybeGrow does it only if it's actually smaller
        List<ByteBuffer> retList = smallest.size() == biggest.size() ? new ArrayList<>(smallest) : maybeGrow(smallest, biggest.size());
        // position-wise minimum, minimized to a compact buffer per entry;
        // NOTE(review): positions beyond smallest's size are presumably filled by maybeGrow — confirm
        for (int i = 0; i < biggest.size(); i++)
            retList.set(i, minimalBufferFor(min(retList.get(i), biggest.get(i), comparator.subtype(i))));
        return retList;
    }
}
public class ForAxis { /** * { @ inheritDoc } */ @ Override public void reset ( final long mNodeKey ) { } }
super . reset ( mNodeKey ) ; mIsFirst = true ; if ( mRange != null ) { mRange . reset ( mNodeKey ) ; }
public class TomlWriter { /** * Write an Object in TOML to a { @ link OutputStream } . Output is encoded as UTF - 8. * @ param from the object to be written * @ param target the OutputStream to which the TOML will be written . The stream is NOT closed after being written to . * @ throws IOException if target . write ( ) fails */ public void write ( Object from , OutputStream target ) throws IOException { } }
OutputStreamWriter writer = new OutputStreamWriter ( target , "UTF-8" ) ; write ( from , writer ) ; writer . flush ( ) ;
public class AWSWAFRegionalClient { /** * Returns an array of < a > XssMatchSet < / a > objects . * @ param listXssMatchSetsRequest * A request to list the < a > XssMatchSet < / a > objects created by the current AWS account . * @ return Result of the ListXssMatchSets operation returned by the service . * @ throws WAFInternalErrorException * The operation failed because of a system problem , even though the request was valid . Retry your request . * @ throws WAFInvalidAccountException * The operation failed because you tried to create , update , or delete an object by using an invalid account * identifier . * @ sample AWSWAFRegional . ListXssMatchSets * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / waf - regional - 2016-11-28 / ListXssMatchSets " target = " _ top " > AWS * API Documentation < / a > */ @ Override public ListXssMatchSetsResult listXssMatchSets ( ListXssMatchSetsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListXssMatchSets ( request ) ;
public class CommonOps_DDF6 { /** * Sets every element in the vector to the specified value . < br > * < br > * a < sub > i < / sub > = value * @ param a A vector whose elements are about to be set . Modified . * @ param v The value each element will have . */ public static void fill ( DMatrix6 a , double v ) { } }
a . a1 = v ; a . a2 = v ; a . a3 = v ; a . a4 = v ; a . a5 = v ; a . a6 = v ;
public class TimeZoneFormat { /** * Private method returning the target region . The target regions is determined by * the locale of this instance . When a generic name is coming from * a meta zone , this region is used for checking if the time zone * is a reference zone of the meta zone . * @ return the target region */ private synchronized String getTargetRegion ( ) { } }
if ( _region == null ) { _region = _locale . getCountry ( ) ; if ( _region . length ( ) == 0 ) { ULocale tmp = ULocale . addLikelySubtags ( _locale ) ; _region = tmp . getCountry ( ) ; if ( _region . length ( ) == 0 ) { _region = "001" ; } } } return _region ;
public class HttpBuilder { /** * Executes an asynchronous PUT request on the configured URI ( an asynchronous alias to the ` put ( Closure ) ` method ) , with additional configuration * provided by the configuration closure . * [ source , groovy ] * def http = HttpBuilder . configure { * request . uri = ' http : / / localhost : 10101' * def result = http . putAsync ( ) { * request . uri . path = ' / something ' * request . body = ' My content ' * request . contentType = ' text / plain ' * The configuration ` closure ` allows additional configuration for this request based on the { @ link HttpConfig } interface . * @ param closure the additional configuration closure ( delegated to { @ link HttpConfig } ) * @ return the { @ link CompletableFuture } containing the future result data */ public CompletableFuture < Object > putAsync ( @ DelegatesTo ( HttpConfig . class ) final Closure closure ) { } }
return CompletableFuture . supplyAsync ( ( ) -> put ( closure ) , getExecutor ( ) ) ;
public class IfcDocumentReferenceImpl {
    /**
     * Returns the inverse association list of IfcRelAssociatesDocument objects
     * that reference this document reference (resolving proxies).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcRelAssociatesDocument> getDocumentRefForObjects() {
        // generated EMF reflective accessor; 'true' resolves proxies
        return (EList<IfcRelAssociatesDocument>) eGet(Ifc4Package.Literals.IFC_DOCUMENT_REFERENCE__DOCUMENT_REF_FOR_OBJECTS, true);
    }
}
public class DeliveryDelayManager {
    /**
     * Start the DeliveryDelayManager daemon.
     *
     * Determines the scan interval (explicit argument if non-negative,
     * otherwise a message-store property with a hard-coded last-resort
     * default), then — under the internal lock — either disables the daemon
     * (interval < 1), starts it and schedules the first alarm if items are
     * already indexed, or fails if it was already started.
     *
     * @param deliveryDelayScanInterval an interval in milliseconds which may
     *        be set via a custom property; if zero or more it overrides the
     *        default delivery-delay-scan interval set at construction
     * @param jsme the owning messaging engine
     * @throws SevereMessageStoreException if the daemon is already running
     */
    public final void start(long deliveryDelayScanInterval, JsMessagingEngine jsme) throws SevereMessageStoreException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "start", "interval=" + deliveryDelayScanInterval + " indexSize=" + deliveryDelayIndex.size());
        messagingEngine = jsme;
        if (deliveryDelayScanInterval >= 0) // If an deliverydelayscan interval was given, use it
        {
            interval = deliveryDelayScanInterval;
        } else // Otherwise, get it from the system property
        {
            // Get property for deliverydelayscan interval
            String value = messageStore.getProperty(MessageStoreConstants.PROP_DELIVERY_DELAY_SCAN_INTERVAL,
                    MessageStoreConstants.PROP_DELIVERY_DELAY_SCAN_INTERVAL_DEFAULT);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "start", "Value from property=<" + value + ">");
            try {
                this.interval = Long.parseLong(value.trim());
            } catch (NumberFormatException e) {
                // No FFDC Code Needed.
                // A malformed property is recorded for diagnostics but must not stop startup
                lastException = e;
                lastExceptionTime = timeNow();
                SibTr.debug(this, tc, "start", "Unable to parse property: " + e);
                this.interval = 1000; // Use hard coded default as last resort
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, "start", "deliveryDelayScanInterval=" + this.interval);
        // about to tinker with various variables so take the lock now
        synchronized (lockObject) {
            if (interval < 1) {
                // non-positive interval disables both scanning and further additions
                runEnabled = false;
                addEnabled = false;
            } else {
                if (deliveryDelayAlarm == null) {
                    scanForInvalidDeliveryDelay();
                    runEnabled = true;
                    addEnabled = true;
                    deliveryDelayManagerStartTime = timeNow();
                    // Now we look at the size of the index and only schedule the first
                    // alarm if the index is not empty. Remember that deliveryDelayables can be
                    // added BEFORE the delivery delay manager is started so it may not be empty.
                    if (deliveryDelayIndex.size() > 0) // If the index is not empty,
                    {
                        scheduleAlarm(interval); // ... schedule the first alarm.
                    }
                } else {
                    // DeliveryDelayManager thread already running
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(this, tc, "DeliveryDelayManager already started");
                    SevereMessageStoreException e = new SevereMessageStoreException("DELIVERYDELAYMANAGER_THREAD_ALREADY_RUNNING_SIMS2012");
                    lastException = e;
                    lastExceptionTime = timeNow();
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "start");
                    throw e;
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "start", "runEnabled=" + runEnabled + " addEnabled=" + addEnabled + " interval=" + interval);
    }
}
public class Link {
    /**
     * Returns up to {@code wordsLeft} words before and {@code wordsRight}
     * words after this Link, within the bounds of the Link's home element
     * text. The returned string is the left context concatenated with the
     * right context; the link text itself is not included.
     *
     * NOTE(review): word boundaries are detected by comparing characters
     * against code point 48 ('0') — chars below '0' (spaces, most
     * punctuation) are treated as separators; this misclassifies characters
     * such as ':' or ';' (above 48) as word characters. Confirm intended.
     */
    public String getContext(int wordsLeft, int wordsRight) {
        final String text = home_cc.getText();
        int temp;
        // get the left start position
        int posLeft = pos.getStart();
        temp = posLeft - 1;
        while (posLeft != 0 && wordsLeft > 0) {
            // skip separator run (chars below '0'), then the word itself
            while (temp > 0 && text.charAt(temp) < 48) {
                temp--;
            }
            while (temp > 0 && text.charAt(temp) >= 48) {
                temp--;
            }
            posLeft = (temp > 0 ? temp + 1 : 0);
            wordsLeft--;
        }
        // get the right end position
        int posRight = pos.getEnd();
        temp = posRight;
        while (posRight != text.length() && wordsRight > 0) {
            // skip separator run, then advance past the following word
            while (temp < text.length() && text.charAt(temp) < 48) {
                temp++;
            }
            while (temp < text.length() && text.charAt(temp) >= 48) {
                temp++;
            }
            posRight = temp;
            wordsRight--;
        }
        // return left context + right context (the link's own span is excluded)
        return text.substring(posLeft, pos.getStart()) + text.substring(pos.getEnd(), posRight);
    }
}
public class IOUtils { /** * Close it and ignore any exceptions . */ public static void closeThrowSqlException ( Closeable closeable , String label ) throws SQLException { } }
if ( closeable != null ) { try { closeable . close ( ) ; } catch ( IOException e ) { throw SqlExceptionUtil . create ( "could not close " + label , e ) ; } }
public class CsvRowSchemaConverter {
    /**
     * Convert {@link TypeInformation} to {@link CsvSchema.ColumnType} based on
     * Jackson's categories.
     *
     * Scalar string/number/boolean types map directly; arrays and rows map to
     * ARRAY after validating each nested element type; a primitive byte array
     * is treated as a STRING (raw bytes). Anything else is rejected.
     */
    private static CsvSchema.ColumnType convertType(String fieldName, TypeInformation<?> info) {
        if (STRING_TYPES.contains(info)) {
            return CsvSchema.ColumnType.STRING;
        } else if (NUMBER_TYPES.contains(info)) {
            return CsvSchema.ColumnType.NUMBER;
        } else if (BOOLEAN_TYPES.contains(info)) {
            return CsvSchema.ColumnType.BOOLEAN;
        } else if (info instanceof ObjectArrayTypeInfo) {
            // element type must itself be representable in CSV
            validateNestedField(fieldName, ((ObjectArrayTypeInfo) info).getComponentInfo());
            return CsvSchema.ColumnType.ARRAY;
        } else if (info instanceof BasicArrayTypeInfo) {
            validateNestedField(fieldName, ((BasicArrayTypeInfo) info).getComponentInfo());
            return CsvSchema.ColumnType.ARRAY;
        } else if (info instanceof RowTypeInfo) {
            // a nested row is flattened to an array; every field must validate
            final TypeInformation<?>[] types = ((RowTypeInfo) info).getFieldTypes();
            for (TypeInformation<?> type : types) {
                validateNestedField(fieldName, type);
            }
            return CsvSchema.ColumnType.ARRAY;
        } else if (info instanceof PrimitiveArrayTypeInfo
                && ((PrimitiveArrayTypeInfo) info).getComponentType() == Types.BYTE) {
            // byte[] columns are carried as raw strings
            return CsvSchema.ColumnType.STRING;
        } else {
            throw new IllegalArgumentException(
                    "Unsupported type information '" + info.toString() + "' for field '" + fieldName + "'.");
        }
    }
}
public class KeyVaultClientCustomImpl { /** * The update key operation changes specified attributes of a stored key and can * be applied to any key type and key version stored in Azure Key Vault . The * cryptographic material of a key itself cannot be changed . In order to perform * this operation , the key must already exist in the Key Vault . Authorization : * requires the keys / update permission . * @ param updateKeyRequest * the grouped properties for updating a key request * @ param serviceCallback * the async ServiceCallback to handle successful and failed * responses . * @ return the { @ link ServiceFuture } object */ public ServiceFuture < KeyBundle > updateKeyAsync ( UpdateKeyRequest updateKeyRequest , final ServiceCallback < KeyBundle > serviceCallback ) { } }
return updateKeyAsync ( updateKeyRequest . vaultBaseUrl ( ) , updateKeyRequest . keyName ( ) , updateKeyRequest . keyVersion ( ) , updateKeyRequest . keyOperations ( ) , updateKeyRequest . keyAttributes ( ) , updateKeyRequest . tags ( ) , serviceCallback ) ;
public class AbstractRenderer { /** * Given a rendering element , traverse the elements compute required bounds * to full display all elements . The method searches for { @ link Bounds } * elements which act to specify the required bounds when adjunct labels * are considered . * @ param element a rendering element * @ return the bounds required ( null if unspecified ) */ public Rectangle2D getBounds ( IRenderingElement element ) { } }
if ( element == null ) return null ; final Bounds bounds = new Bounds ( element ) ; return new Rectangle2D . Double ( bounds . minX , bounds . minY , bounds . width ( ) , bounds . height ( ) ) ;
public class GConvertImage {
    /**
     * Converts an interleaved image into a single-band {@link ImageGray} by
     * computing the average value of each pixel across all the bands.
     *
     * @param input Input interleaved image that is being converted. Not modified.
     * @param output (Optional) The single band output image. If null a new image is created. Modified.
     * @return Converted image.
     * @throws IllegalArgumentException if the input's data type is not supported
     */
    public static <T extends ImageGray<T>> T average(ImageInterleaved input, T output) {
        // dispatch to the concrete, type-specific average implementation
        ImageDataType type = input.getImageType().getDataType();
        if (type == ImageDataType.U8) {
            return (T) ConvertImage.average((InterleavedU8) input, (GrayU8) output);
        } else if (type == ImageDataType.S8) {
            return (T) ConvertImage.average((InterleavedS8) input, (GrayS8) output);
        } else if (type == ImageDataType.U16) {
            return (T) ConvertImage.average((InterleavedU16) input, (GrayU16) output);
        } else if (type == ImageDataType.S16) {
            return (T) ConvertImage.average((InterleavedS16) input, (GrayS16) output);
        } else if (type == ImageDataType.S32) {
            return (T) ConvertImage.average((InterleavedS32) input, (GrayS32) output);
        } else if (type == ImageDataType.S64) {
            return (T) ConvertImage.average((InterleavedS64) input, (GrayS64) output);
        } else if (type == ImageDataType.F32) {
            return (T) ConvertImage.average((InterleavedF32) input, (GrayF32) output);
        } else if (type == ImageDataType.F64) {
            return (T) ConvertImage.average((InterleavedF64) input, (GrayF64) output);
        } else {
            throw new IllegalArgumentException("Unknown image type: " + type);
        }
    }
}