signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ImplDisparityScoreSadRectFive_U8 { /** * Using previously computed results it efficiently finds the disparity in the remaining rows . * When a new block is processes the last row / column is subtracted and the new row / column is * added . */ private void computeRemainingRows ( GrayU8 left , GrayU8 right ) { } }
for ( int row = regionHeight ; row < left . height ; row ++ , activeVerticalScore ++ ) { int oldRow = row % regionHeight ; int previous [ ] = verticalScore [ ( activeVerticalScore - 1 ) % regionHeight ] ; int active [ ] = verticalScore [ activeVerticalScore % regionHeight ] ; // subtract first row from vertical score int scores [ ] = horizontalScore [ oldRow ] ; for ( int i = 0 ; i < lengthHorizontal ; i ++ ) { active [ i ] = previous [ i ] - scores [ i ] ; } UtilDisparityScore . computeScoreRow ( left , right , row , scores , minDisparity , maxDisparity , regionWidth , elementScore ) ; // add the new score for ( int i = 0 ; i < lengthHorizontal ; i ++ ) { active [ i ] += scores [ i ] ; } if ( activeVerticalScore >= regionHeight - 1 ) { int top [ ] = verticalScore [ ( activeVerticalScore - 2 * radiusY ) % regionHeight ] ; int middle [ ] = verticalScore [ ( activeVerticalScore - radiusY ) % regionHeight ] ; int bottom [ ] = verticalScore [ activeVerticalScore % regionHeight ] ; computeScoreFive ( top , middle , bottom , fiveScore , left . width ) ; computeDisparity . process ( row - ( 1 + 4 * radiusY ) + 2 * radiusY + 1 , fiveScore ) ; } }
public class EbInterfaceReader { /** * Create a new reader builder . * @ param aClass * The UBL class to be read . May not be < code > null < / code > . * @ return The new reader builder . Never < code > null < / code > . * @ param < T > * The ebInterface document implementation type */ @ Nonnull public static < T > EbInterfaceReader < T > create ( @ Nonnull final Class < T > aClass ) { } }
return new EbInterfaceReader < > ( aClass ) ;
public class RestService { /** * Returns the first address in { @ code locations } that is equals to a public IP of the system * @ param locations The list of address ( hostname : port or ip : port ) to check * @ return The first address in { @ code locations } that is equals to a public IP of the system or null if none */ static String checkLocality ( String [ ] locations , Log log ) { } }
try { InetAddress [ ] candidates = NetworkUtils . getGlobalInterfaces ( ) ; for ( String address : locations ) { StringUtils . IpAndPort ipAndPort = StringUtils . parseIpAddress ( address ) ; InetAddress addr = InetAddress . getByName ( ipAndPort . ip ) ; for ( InetAddress candidate : candidates ) { if ( addr . equals ( candidate ) ) { return address ; } } } } catch ( SocketException e ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Unable to retrieve the global interfaces of the system" , e ) ; } } catch ( UnknownHostException e ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Unable to retrieve IP address" , e ) ; } } return null ;
public class UriEscapeUtil { /** * Perform an escape operation , based on a Reader , according to the specified type and writing the * result to a Writer . * Note this reader is going to be read char - by - char , so some kind of buffering might be appropriate if this * is an inconvenience for the specific Reader implementation . */ static void escape ( final Reader reader , final Writer writer , final UriEscapeType escapeType , final String encoding ) throws IOException { } }
if ( reader == null ) { return ; } int c1 , c2 ; // c0 : last char , c1 : current char , c2 : next char c2 = reader . read ( ) ; while ( c2 >= 0 ) { c1 = c2 ; c2 = reader . read ( ) ; final int codepoint = codePointAt ( ( char ) c1 , ( char ) c2 ) ; /* * Shortcut : most characters will be alphabetic , and we won ' t need to do anything at * all for them . No need to use the complete UriEscapeType check system at all . */ if ( UriEscapeType . isAlpha ( codepoint ) ) { writer . write ( c1 ) ; continue ; } /* * Check whether the character is allowed or not */ if ( escapeType . isAllowed ( codepoint ) ) { writer . write ( c1 ) ; continue ; } /* * We know we need to escape , so from here on we will only work with the codepoint - - we can advance * the chars . */ if ( Character . charCount ( codepoint ) > 1 ) { // This is to compensate that we are actually reading two char positions with a single codepoint . c1 = c2 ; c2 = reader . read ( ) ; } /* * Perform the real escape */ final byte [ ] charAsBytes ; try { charAsBytes = new String ( Character . toChars ( codepoint ) ) . getBytes ( encoding ) ; } catch ( final UnsupportedEncodingException e ) { throw new IllegalArgumentException ( "Exception while escaping URI: Bad encoding '" + encoding + "'" , e ) ; } for ( final byte b : charAsBytes ) { writer . write ( '%' ) ; writer . write ( printHexa ( b ) ) ; } }
public class JNRPERequest { /** * Initializes the object with the given command and the given list of ' ! ' * separated list of arguments . * @ param commandName * The command * @ param argumentsString * The arguments */ private void init ( final String commandName , final String argumentsString ) { } }
String fullCommandString ; String tmpArgumentsString = argumentsString ; if ( tmpArgumentsString != null && ! tmpArgumentsString . isEmpty ( ) && tmpArgumentsString . charAt ( 0 ) == '!' ) { tmpArgumentsString = tmpArgumentsString . substring ( 1 ) ; } if ( ! StringUtils . isBlank ( tmpArgumentsString ) ) { fullCommandString = commandName + "!" + tmpArgumentsString ; } else { fullCommandString = commandName ; } this . packet . setType ( PacketType . QUERY ) ; this . packet . setBuffer ( fullCommandString ) ; // updateCRC ( ) ;
public class SDMath { /** * Element - wise reciprocal ( inverse ) function : out [ i ] = 1 / in [ i ] * @ param name Name of the output variable * @ param a Input variable * @ return Output variable */ public SDVariable reciprocal ( String name , SDVariable a ) { } }
validateNumerical ( "reciprocal" , a ) ; SDVariable ret = f ( ) . reciprocal ( a ) ; return updateVariableNameAndReference ( ret , name ) ;
public class DefaultTextBundleRegistry { /** * Convert locale to string with language _ country [ _ variant ] * @ param locale * @ return locale string */ protected final String toLocaleStr ( Locale locale ) { } }
if ( locale == Locale . ROOT ) { return "" ; } String language = locale . getLanguage ( ) ; String country = locale . getCountry ( ) ; String variant = locale . getVariant ( ) ; if ( language == "" && country == "" && variant == "" ) { return "" ; } StringBuilder sb = new StringBuilder ( ) ; if ( variant != "" ) { sb . append ( language ) . append ( '_' ) . append ( country ) . append ( '_' ) . append ( variant ) ; } else if ( country != "" ) { sb . append ( language ) . append ( '_' ) . append ( country ) ; } else { sb . append ( language ) ; } return sb . toString ( ) ;
public class CmsUsersCsvDownloadDialog { /** * Generates the CSV file for the given users . < p > * @ return CSV file */ public String generateCsv ( ) { } }
Map < String , List < String > > objects = getData ( ) ; // get the data object from session List < String > groups = objects . get ( "groups" ) ; List < String > roles = objects . get ( "roles" ) ; Map < CmsUUID , CmsUser > exportUsers = new HashMap < CmsUUID , CmsUser > ( ) ; try { if ( ( ( groups == null ) || ( groups . size ( ) < 1 ) ) && ( ( roles == null ) || ( roles . size ( ) < 1 ) ) ) { exportUsers = CmsImportExportUserDialog . addExportAllUsers ( getCms ( ) , getParamOufqn ( ) , exportUsers ) ; } else { exportUsers = CmsImportExportUserDialog . addExportUsersFromGroups ( getCms ( ) , groups , exportUsers ) ; exportUsers = CmsImportExportUserDialog . addExportUsersFromRoles ( getCms ( ) , getParamOufqn ( ) , roles , exportUsers ) ; } } catch ( CmsException e ) { throw new CmsRuntimeException ( Messages . get ( ) . container ( Messages . ERR_GET_EXPORT_USERS_0 ) , e ) ; } StringBuffer buffer = new StringBuffer ( ) ; CmsUserExportSettings settings = OpenCms . getImportExportManager ( ) . getUserExportSettings ( ) ; String separator = CmsStringUtil . substitute ( settings . getSeparator ( ) , "\\t" , "\t" ) ; List < String > values = settings . getColumns ( ) ; buffer . append ( "name" ) ; Iterator < String > itValues = values . iterator ( ) ; while ( itValues . hasNext ( ) ) { buffer . append ( separator ) ; buffer . append ( itValues . next ( ) ) ; } buffer . append ( "\n" ) ; Object [ ] users = exportUsers . values ( ) . toArray ( ) ; for ( int i = 0 ; i < users . length ; i ++ ) { CmsUser exportUser = ( CmsUser ) users [ i ] ; if ( ! exportUser . getOuFqn ( ) . equals ( getParamOufqn ( ) ) ) { // skip users of others ous continue ; } if ( ! isExportable ( exportUser ) ) { continue ; } buffer . append ( exportUser . getSimpleName ( ) ) ; itValues = values . iterator ( ) ; while ( itValues . hasNext ( ) ) { buffer . append ( separator ) ; String curValue = itValues . next ( ) ; try { Method method = CmsUser . class . getMethod ( "get" + curValue . 
substring ( 0 , 1 ) . toUpperCase ( ) + curValue . substring ( 1 ) ) ; String curOutput = ( String ) method . invoke ( exportUser ) ; if ( CmsStringUtil . isEmptyOrWhitespaceOnly ( curOutput ) || curOutput . equals ( "null" ) ) { curOutput = ( String ) exportUser . getAdditionalInfo ( curValue ) ; } if ( curValue . equals ( "password" ) ) { curOutput = OpenCms . getPasswordHandler ( ) . getDigestType ( ) + "_" + curOutput ; } if ( ! CmsStringUtil . isEmptyOrWhitespaceOnly ( curOutput ) && ! curOutput . equals ( "null" ) ) { buffer . append ( curOutput ) ; } } catch ( NoSuchMethodException e ) { Object obj = exportUser . getAdditionalInfo ( curValue ) ; if ( obj != null ) { String curOutput = String . valueOf ( obj ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( curOutput ) ) { buffer . append ( curOutput ) ; } } } catch ( IllegalAccessException e ) { throw new CmsRuntimeException ( Messages . get ( ) . container ( Messages . ERR_ILLEGAL_ACCESS_0 ) , e ) ; } catch ( InvocationTargetException e ) { throw new CmsRuntimeException ( Messages . get ( ) . container ( Messages . ERR_INVOCATION_TARGET_0 ) , e ) ; } } buffer . append ( "\n" ) ; } HttpServletResponse res = CmsFlexController . getController ( getJsp ( ) . getRequest ( ) ) . getTopResponse ( ) ; res . setContentType ( "text/comma-separated-values" ) ; String filename = "export_users" + new Random ( ) . nextInt ( 1024 ) + ".csv" ; res . setHeader ( "Content-Disposition" , new StringBuffer ( "attachment; filename=\"" ) . append ( filename ) . append ( "\"" ) . toString ( ) ) ; return buffer . toString ( ) ;
public class NetUtils { /** * Converts an IPv4 address to raw bytes , returning a byte [ 4 ] , or null if the input is malformed . * @ param ipv4Address The string representation of an ipv4 address . * @ return A { @ code byte [ ] } containing the parts of the v4 ip address . */ public static byte [ ] getRawAddress ( final String ipv4Address ) { } }
final Matcher m = IPV4_ADDRESS . matcher ( ipv4Address ) ; if ( ! m . find ( ) ) { return null ; } final byte [ ] addr = new byte [ 4 ] ; for ( int i = 0 ; i < 4 ; i ++ ) { final int intVal = Integer . parseInt ( m . group ( i + 1 ) ) & 0x00ff ; addr [ i ] = ( byte ) intVal ; } return addr ;
public class DocBookBuilder { /** * Builds a Report Chapter to be included in the book that displays a count of different types of errors and then a table to * list the errors , providing links and basic topic data . * @ param buildData Information and data structures for the build . * @ return The Docbook Report Chapter formatted as a String . */ private String buildReportChapter ( final BuildData buildData ) { } }
log . info ( "\tBuilding Report Chapter" ) ; final ContentSpec contentSpec = buildData . getContentSpec ( ) ; final String locale = buildData . getBuildLocale ( ) ; final ZanataDetails zanataDetails = buildData . getZanataDetails ( ) ; String reportChapter = "" ; final List < TopicErrorData > noContentTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . NO_CONTENT ) ; final List < TopicErrorData > invalidInjectionTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . INVALID_INJECTION ) ; final List < TopicErrorData > invalidContentTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . INVALID_CONTENT ) ; final List < TopicErrorData > invalidImageTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . INVALID_IMAGES ) ; final List < TopicErrorData > untranslatedTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . UNTRANSLATED ) ; final List < TopicErrorData > incompleteTranslatedTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . INCOMPLETE_TRANSLATION ) ; final List < TopicErrorData > fuzzyTranslatedTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . FUZZY_TRANSLATION ) ; final List < TopicErrorData > notPushedTranslatedTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . NOT_PUSHED_FOR_TRANSLATION ) ; final List < TopicErrorData > oldTranslatedTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . OLD_TRANSLATION ) ; final List < TopicErrorData > oldUntranslatedTopics = buildData . getErrorDatabase ( ) . getErrorsOfType ( locale , ErrorType . OLD_UNTRANSLATED ) ; final List < String > list = new LinkedList < String > ( ) ; list . add ( DocBookUtilities . buildListItem ( "Total Number of Errors: " + getNumErrors ( ) ) ) ; list . add ( DocBookUtilities . 
buildListItem ( "Total Number of Warnings: " + getNumWarnings ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics with No Content: " + noContentTopics . size ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics with Invalid Injection points: " + invalidInjectionTopics . size ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics with Invalid Content: " + invalidContentTopics . size ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics with Invalid Image references: " + invalidImageTopics . size ( ) ) ) ; if ( buildData . isTranslationBuild ( ) ) { list . add ( DocBookUtilities . buildListItem ( "Number of Topics that haven't been pushed for Translation: " + notPushedTranslatedTopics . size ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics that haven't been Translated: " + untranslatedTopics . size ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics that have incomplete Translations: " + incompleteTranslatedTopics . size ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics that have fuzzy Translations: " + fuzzyTranslatedTopics . size ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics that haven't been Translated but are using previous revisions: " + oldUntranslatedTopics . size ( ) ) ) ; list . add ( DocBookUtilities . buildListItem ( "Number of Topics that have been Translated using a previous revision: " + oldTranslatedTopics . size ( ) ) ) ; } reportChapter += DocBookUtilities . wrapListItems ( list , "Build Statistics" ) ; // Add a link to show the zanata statistics if ( buildData . isTranslationBuild ( ) ) { reportChapter += generateAllTopicZanataUrl ( buildData ) ; } final boolean showEditorLinks = buildData . getBuildOptions ( ) . getInsertEditorLinks ( ) ; // Create the Report Tables reportChapter += ReportUtilities . 
buildReportTable ( noContentTopics , "Topics that have no Content" , showEditorLinks , zanataDetails ) ; reportChapter += ReportUtilities . buildReportTable ( invalidContentTopics , "Topics that have Invalid XML Content" , showEditorLinks , zanataDetails ) ; reportChapter += ReportUtilities . buildReportTable ( invalidInjectionTopics , "Topics that have Invalid Injection points in the XML" , showEditorLinks , zanataDetails ) ; reportChapter += ReportUtilities . buildReportTable ( invalidImageTopics , "Topics that have Invalid Image references in the XML" , showEditorLinks , zanataDetails ) ; if ( buildData . isTranslationBuild ( ) ) { reportChapter += ReportUtilities . buildReportTable ( notPushedTranslatedTopics , "Topics that haven't been pushed for Translation" , showEditorLinks , zanataDetails ) ; reportChapter += ReportUtilities . buildReportTable ( untranslatedTopics , "Topics that haven't been Translated" , showEditorLinks , zanataDetails ) ; reportChapter += ReportUtilities . buildReportTable ( incompleteTranslatedTopics , "Topics that have Incomplete Translations" , showEditorLinks , zanataDetails ) ; reportChapter += ReportUtilities . buildReportTable ( fuzzyTranslatedTopics , "Topics that have fuzzy Translations" , showEditorLinks , zanataDetails ) ; reportChapter += ReportUtilities . buildReportTable ( oldUntranslatedTopics , "Topics that haven't been Translated but are using previous revisions" , showEditorLinks , zanataDetails ) ; reportChapter += ReportUtilities . buildReportTable ( oldTranslatedTopics , "Topics that have been Translated using a previous revision" , showEditorLinks , zanataDetails ) ; } if ( contentSpec . getBookType ( ) == BookType . ARTICLE || contentSpec . getBookType ( ) == BookType . ARTICLE_DRAFT ) { return DocBookBuildUtilities . addDocBookPreamble ( buildData . getDocBookVersion ( ) , DocBookUtilities . buildSection ( reportChapter , "Status Report" ) , "section" , buildData . 
getEntityFileName ( ) ) ; } else { return DocBookBuildUtilities . addDocBookPreamble ( buildData . getDocBookVersion ( ) , DocBookUtilities . buildChapter ( reportChapter , "Status Report" ) , "chapter" , buildData . getEntityFileName ( ) ) ; }
public class AppServiceEnvironmentsInner { /** * Move an App Service Environment to a different VNET . * Move an App Service Environment to a different VNET . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service Environment . * @ param vnetInfo Details for the new virtual network . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; SiteInner & gt ; object */ public Observable < Page < SiteInner > > beginChangeVnetAsync ( final String resourceGroupName , final String name , final VirtualNetworkProfile vnetInfo ) { } }
return beginChangeVnetWithServiceResponseAsync ( resourceGroupName , name , vnetInfo ) . map ( new Func1 < ServiceResponse < Page < SiteInner > > , Page < SiteInner > > ( ) { @ Override public Page < SiteInner > call ( ServiceResponse < Page < SiteInner > > response ) { return response . body ( ) ; } } ) ;
public class AbstractCasWebflowConfigurer { /** * Create mapper to subflow state . * @ param mappings the mappings * @ return the mapper */ public Mapper createMapperToSubflowState ( final List < DefaultMapping > mappings ) { } }
val inputMapper = new DefaultMapper ( ) ; mappings . forEach ( inputMapper :: addMapping ) ; return inputMapper ;
public class Rebuilder { /** * Resizes existing hash array into a larger one within a single Memory assuming enough space . * This assumes a Memory preamble of standard form with the correct value of thetaLong . * The Memory lgArrLongs will change . * Afterwards , the caller must update local copies of lgArrLongs and hashTableThreshold from * Memory . * @ param mem the Memory * @ param preambleLongs the size of the preamble in longs * @ param srcLgArrLongs the size of the source hash table * @ param tgtLgArrLongs the LgArrLongs value for the new hash table */ static final void resize ( final WritableMemory mem , final int preambleLongs , final int srcLgArrLongs , final int tgtLgArrLongs ) { } }
// Note : This copies the Memory data onto the heap and then at the end copies the result // back to Memory . Even if we tried to do this directly into Memory it would require pre - clearing , // and the internal loops would be slower . The bulk copies are performed at a low level and // are quite fast . Measurements reveal that we are not paying much of a penalty . // Preamble stays in place final int preBytes = preambleLongs << 3 ; // Bulk copy source to on - heap buffer final int srcHTLen = 1 << srcLgArrLongs ; // current value final long [ ] srcHTArr = new long [ srcHTLen ] ; // on - heap src buffer mem . getLongArray ( preBytes , srcHTArr , 0 , srcHTLen ) ; // Create destination on - heap buffer final int dstHTLen = 1 << tgtLgArrLongs ; final long [ ] dstHTArr = new long [ dstHTLen ] ; // on - heap dst buffer // Rebuild hash table in destination buffer final long thetaLong = extractThetaLong ( mem ) ; HashOperations . hashArrayInsert ( srcHTArr , dstHTArr , tgtLgArrLongs , thetaLong ) ; // Bulk copy to destination memory mem . putLongArray ( preBytes , dstHTArr , 0 , dstHTLen ) ; // put it back , no need to clear insertLgArrLongs ( mem , tgtLgArrLongs ) ; // update in mem
public class ElementParametersImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setCostParameters ( CostParameters newCostParameters ) { } }
if ( newCostParameters != costParameters ) { NotificationChain msgs = null ; if ( costParameters != null ) msgs = ( ( InternalEObject ) costParameters ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - BpsimPackage . ELEMENT_PARAMETERS__COST_PARAMETERS , null , msgs ) ; if ( newCostParameters != null ) msgs = ( ( InternalEObject ) newCostParameters ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - BpsimPackage . ELEMENT_PARAMETERS__COST_PARAMETERS , null , msgs ) ; msgs = basicSetCostParameters ( newCostParameters , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , BpsimPackage . ELEMENT_PARAMETERS__COST_PARAMETERS , newCostParameters , newCostParameters ) ) ;
public class Matchers { /** * Matches an AST node if it has the same erased type as the given class . */ public static < T extends Tree > Matcher < T > isSameType ( Class < ? > clazz ) { } }
return new IsSameType < > ( typeFromClass ( clazz ) ) ;
public class HeatMap { /** * this generates the heatmap off of the main thread , loads the data , makes the overlay , then * adds it to the map */ private void generateMap ( ) { } }
if ( getActivity ( ) == null ) // java . lang . IllegalStateException : Fragment HeatMap { 44f341d0 } not attached to Activity return ; if ( renderJobActive ) return ; renderJobActive = true ; int densityDpi = ( int ) ( dm . density * cellSizeInDp ) ; // 10 dpi sized cells IGeoPoint iGeoPoint = mMapView . getProjection ( ) . fromPixels ( 0 , 0 ) ; IGeoPoint iGeoPoint2 = mMapView . getProjection ( ) . fromPixels ( densityDpi , densityDpi ) ; // delta is the size of our cell in lat , lon // since this is zoom dependent , rerun the calculations on zoom changes double xCellSizeLongitude = Math . abs ( iGeoPoint . getLongitude ( ) - iGeoPoint2 . getLongitude ( ) ) ; double yCellSizeLatitude = Math . abs ( iGeoPoint . getLatitude ( ) - iGeoPoint2 . getLatitude ( ) ) ; BoundingBox view = mMapView . getBoundingBox ( ) ; // a set of a GeoPoints representing what we want a heat map of . List < IGeoPoint > pts = loadPoints ( view ) ; // the highest value in our collection of stuff int maxHeat = 0 ; // a temp container of all grid cells and their hit count ( which turns into a color on render ) // the lower the cell size the more cells and items in the map . Map < BoundingBox , Integer > heatmap = new HashMap < BoundingBox , Integer > ( ) ; // create the grid Log . i ( TAG , "heatmap builder " + yCellSizeLatitude + " " + xCellSizeLongitude ) ; Log . i ( TAG , "heatmap builder " + view ) ; // populate the cells for ( double lat = view . getLatNorth ( ) ; lat >= view . getLatSouth ( ) ; lat = lat - yCellSizeLatitude ) { for ( double lon = view . getLonEast ( ) ; lon >= view . getLonWest ( ) ; lon = lon - xCellSizeLongitude ) { // Log . i ( TAG , " heatmap builder " + lat + " , " + lon ) ; heatmap . put ( new BoundingBox ( lat , lon , lat - yCellSizeLatitude , lon - xCellSizeLongitude ) , 0 ) ; } } Log . i ( TAG , "generating the heatmap" ) ; long now = System . currentTimeMillis ( ) ; // generate the map , put the items in each cell for ( int i = 0 ; i < pts . 
size ( ) ; i ++ ) { // get the box for this pt ' s coordinates int x = increment ( pts . get ( i ) , heatmap ) ; if ( x > maxHeat ) maxHeat = x ; } Log . i ( TAG , "generating the heatmap, done " + ( System . currentTimeMillis ( ) - now ) ) ; // figure out the color scheme // if you need a more logirthmic scale , this is the place to do it . // cells with a 0 value are blank // cells 1 to 1/3 of the max value are yellow // cells from 1/3 to 2/3 are organge // cells 2/3 or higher are red int redthreshold = maxHeat * 2 / 3 ; // upper 1/3 int orangethreshold = maxHeat * 1 / 3 ; // middle 1/3 // render the map Log . i ( TAG , "rendering" ) ; now = System . currentTimeMillis ( ) ; // each bounding box if the hit count > 0 create a polygon with the bounding box coordinates with the right fill color final FolderOverlay group = new FolderOverlay ( ) ; Iterator < Map . Entry < BoundingBox , Integer > > iterator = heatmap . entrySet ( ) . iterator ( ) ; while ( iterator . hasNext ( ) ) { Map . Entry < BoundingBox , Integer > next = iterator . next ( ) ; if ( next . getValue ( ) > 0 ) { group . add ( createPolygon ( next . getKey ( ) , next . getValue ( ) , redthreshold , orangethreshold ) ) ; } } Log . i ( TAG , "render done , done " + ( System . currentTimeMillis ( ) - now ) ) ; if ( getActivity ( ) == null ) // java . lang . IllegalStateException : Fragment HeatMap { 44f341d0 } not attached to Activity return ; if ( mMapView == null ) // java . lang . IllegalStateException : Fragment HeatMap { 44f341d0 } not attached to Activity return ; mMapView . post ( new Runnable ( ) { @ Override public void run ( ) { if ( heatmapOverlay != null ) mMapView . getOverlayManager ( ) . remove ( heatmapOverlay ) ; mMapView . getOverlayManager ( ) . add ( group ) ; heatmapOverlay = group ; mMapView . invalidate ( ) ; renderJobActive = false ; } } ) ;
public class ConnectionManager { /** * Opens the project . * @ throws KnowledgeSourceBackendInitializationException if an error * occurs . */ void init ( ) throws KnowledgeSourceBackendInitializationException { } }
if ( this . project == null ) { Util . logger ( ) . log ( Level . FINE , "Opening Protege project {0}" , this . projectIdentifier ) ; this . project = initProject ( ) ; if ( this . project == null ) { throw new KnowledgeSourceBackendInitializationException ( "Could not load project " + this . projectIdentifier ) ; } else { this . protegeKnowledgeBase = this . project . getKnowledgeBase ( ) ; Util . logger ( ) . log ( Level . FINE , "Project {0} opened successfully" , this . projectIdentifier ) ; } }
public class ParameterProperty { /** * Set the non - null param set from given BitSet . * @ param nonNullSet * BitSet indicating which parameters are non - null */ public void setParamsWithProperty ( BitSet nonNullSet ) { } }
for ( int i = 0 ; i < 32 ; ++ i ) { setParamWithProperty ( i , nonNullSet . get ( i ) ) ; }
public class ComparatorCompat { /** * Returns a comparator that considers { @ code null } to be less than non - null . * If the specified comparator is { @ code null } , then the returned * comparator considers all non - null values to be equal . * @ param < T > the type of the objects compared by the comparator * @ param comparator a comparator for comparing non - null values * @ return a comparator */ @ NotNull public static < T > ComparatorCompat < T > nullsFirst ( @ Nullable Comparator < ? super T > comparator ) { } }
return nullsComparator ( true , comparator ) ;
public class RequestFromVertx { /** * Same like { @ link # parameter ( String ) } , but converts the parameter to * Integer if found . * The parameter is decoded by default . * @ param name The name of the post or query parameter * @ return The value of the parameter or null if not found . */ @ Override public Integer parameterAsInteger ( String name ) { } }
String parameter = parameter ( name ) ; try { return Integer . parseInt ( parameter ) ; } catch ( Exception e ) { // NOSONAR return null ; }
public class IpPermission { /** * [ VPC only ] The IPv6 ranges . * @ param ipv6Ranges * [ VPC only ] The IPv6 ranges . */ public void setIpv6Ranges ( java . util . Collection < Ipv6Range > ipv6Ranges ) { } }
if ( ipv6Ranges == null ) { this . ipv6Ranges = null ; return ; } this . ipv6Ranges = new com . amazonaws . internal . SdkInternalList < Ipv6Range > ( ipv6Ranges ) ;
public class GetSampledRequestsResult { /** * A complex type that contains detailed information about each of the requests in the sample . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSampledRequests ( java . util . Collection ) } or { @ link # withSampledRequests ( java . util . Collection ) } if you * want to override the existing values . * @ param sampledRequests * A complex type that contains detailed information about each of the requests in the sample . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetSampledRequestsResult withSampledRequests ( SampledHTTPRequest ... sampledRequests ) { } }
if ( this . sampledRequests == null ) { setSampledRequests ( new java . util . ArrayList < SampledHTTPRequest > ( sampledRequests . length ) ) ; } for ( SampledHTTPRequest ele : sampledRequests ) { this . sampledRequests . add ( ele ) ; } return this ;
public class TarArchive { /** * Set user and group information that will be used to fill in the tar archive ' s entry headers . Since Java currently * provides no means of determining a user name , user id , group name , or group id for a given File , TarArchive * allows the programmer to specify values to be used in their place . * @ param userId * The user Id to use in the headers . * @ param userName * The user name to use in the headers . * @ param groupId * The group id to use in the headers . * @ param groupName * The group name to use in the headers . */ public void setUserInfo ( int userId , String userName , int groupId , String groupName ) { } }
this . userId = userId ; this . userName = userName ; this . groupId = groupId ; this . groupName = groupName ;
public class AbstractSequenceClassifier { /** * Load a test file , run the classifier on it , and then write a Viterbi search * graph for each sequence . * @ param testFile * The file to test on . */ public void classifyAndWriteViterbiSearchGraph ( String testFile , String searchGraphPrefix , DocumentReaderAndWriter < IN > readerAndWriter ) throws IOException { } }
Timing timer = new Timing ( ) ; ObjectBank < List < IN > > documents = makeObjectBankFromFile ( testFile , readerAndWriter ) ; int numWords = 0 ; int numSentences = 0 ; for ( List < IN > doc : documents ) { DFSA < String , Integer > tagLattice = getViterbiSearchGraph ( doc , AnswerAnnotation . class ) ; numWords += doc . size ( ) ; PrintWriter latticeWriter = new PrintWriter ( new FileOutputStream ( searchGraphPrefix + '.' + numSentences + ".wlattice" ) ) ; PrintWriter vsgWriter = new PrintWriter ( new FileOutputStream ( searchGraphPrefix + '.' + numSentences + ".lattice" ) ) ; if ( readerAndWriter instanceof LatticeWriter ) ( ( LatticeWriter ) readerAndWriter ) . printLattice ( tagLattice , doc , latticeWriter ) ; tagLattice . printAttFsmFormat ( vsgWriter ) ; latticeWriter . close ( ) ; vsgWriter . close ( ) ; numSentences ++ ; } long millis = timer . stop ( ) ; double wordspersec = numWords / ( ( ( double ) millis ) / 1000 ) ; NumberFormat nf = new DecimalFormat ( "0.00" ) ; // easier way ! System . err . println ( this . getClass ( ) . getName ( ) + " tagged " + numWords + " words in " + numSentences + " documents at " + nf . format ( wordspersec ) + " words per second." ) ;
public class SimpleMisoSceneParser { /** * Parses the XML file on the supplied input stream into a scene model instance . */ public SimpleMisoSceneModel parseScene ( InputStream in ) throws IOException , SAXException { } }
_model = null ; _digester . push ( this ) ; _digester . parse ( in ) ; return _model ;
public class Util { /** * Get a string containing the stack of the specified exception * @ param e * @ return */ public static String stackToDebugString ( Throwable e ) { } }
StringWriter sw = new StringWriter ( ) ; PrintWriter pw = new PrintWriter ( sw ) ; e . printStackTrace ( pw ) ; pw . close ( ) ; String text = sw . toString ( ) ; // Jump past the throwable text = text . substring ( text . indexOf ( "at" ) ) ; return text ;
public class JdbcExtractor { /** * Execute query using JDBC PreparedStatement to pass query parameters Set * fetch size * @ param cmds commands - query , fetch size , query parameters * @ return JDBC ResultSet * @ throws Exception */ private CommandOutput < ? , ? > executePreparedSql ( List < Command > cmds ) { } }
String query = null ; List < String > queryParameters = null ; int fetchSize = 0 ; for ( Command cmd : cmds ) { if ( cmd instanceof JdbcCommand ) { JdbcCommandType type = ( JdbcCommandType ) cmd . getCommandType ( ) ; switch ( type ) { case QUERY : query = cmd . getParams ( ) . get ( 0 ) ; break ; case QUERYPARAMS : queryParameters = cmd . getParams ( ) ; break ; case FETCHSIZE : fetchSize = Integer . parseInt ( cmd . getParams ( ) . get ( 0 ) ) ; break ; default : this . log . error ( "Command " + type . toString ( ) + " not recognized" ) ; break ; } } } this . log . info ( "Executing query:" + query ) ; ResultSet resultSet = null ; try { this . jdbcSource = createJdbcSource ( ) ; if ( this . dataConnection == null ) { this . dataConnection = this . jdbcSource . getConnection ( ) ; } PreparedStatement statement = this . dataConnection . prepareStatement ( query , ResultSet . TYPE_FORWARD_ONLY , ResultSet . CONCUR_READ_ONLY ) ; int parameterPosition = 1 ; if ( queryParameters != null && queryParameters . size ( ) > 0 ) { for ( String parameter : queryParameters ) { statement . setString ( parameterPosition , parameter ) ; parameterPosition ++ ; } } if ( fetchSize != 0 ) { statement . setFetchSize ( fetchSize ) ; } final boolean status = statement . execute ( ) ; if ( status == false ) { this . log . error ( "Failed to execute sql:" + query ) ; } resultSet = statement . getResultSet ( ) ; } catch ( Exception e ) { this . log . error ( "Failed to execute sql:" + query + " ;error-" + e . getMessage ( ) , e ) ; } CommandOutput < JdbcCommand , ResultSet > output = new JdbcCommandOutput ( ) ; output . put ( ( JdbcCommand ) cmds . get ( 0 ) , resultSet ) ; return output ;
public class SpringUtil { /** * Returns the resource at the specified location . Supports classpath references . * @ param location The resource location . * @ return The corresponding resource , or null if one does not exist . */ public static Resource getResource ( String location ) { } }
Resource resource = resolver . getResource ( location ) ; return resource . exists ( ) ? resource : null ;
public class ConcurrentConveyor { /** * Drains a batch of items from the queue at the supplied index into the * supplied collection . * @ return the number of items drained */ public final int drainTo ( int queueIndex , Collection < ? super E > drain ) { } }
return drain ( queues [ queueIndex ] , drain , Integer . MAX_VALUE ) ;
public class AbstractPrintQuery { /** * The instance method executes the query without an access check . * @ return true if the query contains values , else false * @ throws EFapsException on error */ public boolean executeWithoutAccessCheck ( ) throws EFapsException { } }
boolean ret = false ; if ( isMarked4execute ( ) ) { if ( getInstanceList ( ) . size ( ) > 0 ) { ret = executeOneCompleteStmt ( createSQLStatement ( ) , this . allSelects ) ; } if ( ret ) { for ( final OneSelect onesel : this . allSelects ) { if ( onesel . getFromSelect ( ) != null ) { onesel . getFromSelect ( ) . execute ( onesel ) ; } } } } return ret ;
public class ToolsClassFinder { /** * Searches for < code > tools . jar < / code > in various locations and uses an { @ link URLClassLoader } for * loading a class from this files . If the class could not be found in any , then a { @ link ClassNotFoundException } * is thrown . The locations used for lookup are ( in this order ) * < ul > * < li > $ JAVA _ HOME / . . / lib / tools . jar < / li > * < li > $ JAVA _ HOME / lib / tools . jar < / li > * < / ul > * < code > $ JAVA _ HOME < / code > here is the value of the system property < code > java . home < / code > * @ param pClassName class to lookup * @ return the found class * @ throws ClassNotFoundException if no class could be found */ public static Class lookupInToolsJar ( String pClassName ) throws ClassNotFoundException { } }
// Try to look up tools . jar within $ java . home , otherwise give up String extraInfo ; if ( JAVA_HOME != null ) { extraInfo = "JAVA_HOME is " + JAVA_HOME ; for ( File toolsJar : TOOLS_JAR_LOCATIONS ) { try { if ( toolsJar . exists ( ) ) { ClassLoader loader = createClassLoader ( toolsJar ) ; return loader . loadClass ( pClassName ) ; } } catch ( MalformedURLException e ) { // Cannot happen because the URL comes from a File . // And if , we throws an class not found exception . extraInfo = "Cannot create URL from " + toolsJar ; } } } else { extraInfo = "No JAVA_HOME set" ; } throw new ClassNotFoundException ( "No tools.jar found (" + extraInfo + ")" ) ;
public class sslcrl { /** * Use this API to unset the properties of sslcrl resource . * Properties that need to be unset are specified in args array . */ public static base_response unset ( nitro_service client , sslcrl resource , String [ ] args ) throws Exception { } }
sslcrl unsetresource = new sslcrl ( ) ; unsetresource . crlname = resource . crlname ; return unsetresource . unset_resource ( client , args ) ;
public class Declarations { /** * Upcasts a Builder instance to the generated superclass , to allow access to private fields . * < p > Reuses an existing upcast instance if one was already declared in this scope . * @ param code the { @ link SourceBuilder } to add the declaration to * @ param datatype metadata about the user type the builder is being generated for * @ param builder the Builder instance to upcast * @ returns a variable holding the upcasted instance */ public static Variable upcastToGeneratedBuilder ( SourceBuilder code , Datatype datatype , String builder ) { } }
return code . scope ( ) . computeIfAbsent ( Declaration . UPCAST , ( ) -> { Variable base = new Variable ( "base" ) ; code . addLine ( UPCAST_COMMENT ) . addLine ( "%s %s = %s;" , datatype . getGeneratedBuilder ( ) , base , builder ) ; return base ; } ) ;
public class NfsFileBase { /** * ( non - Javadoc ) * @ see com . emc . ecs . nfsclient . nfs . NfsFile # commit ( long , int ) */ public NfsCommitResponse commit ( long offsetToCommit , int dataSizeToCommit ) throws IOException { } }
return getNfs ( ) . wrapped_sendCommit ( makeCommitRequest ( offsetToCommit , dataSizeToCommit ) ) ;
public class AbstractDataType { /** * Validate that current definition can be updated with the new definition * @ param newType */ @ Override public void validateUpdate ( IDataType newType ) throws TypeUpdateException { } }
if ( ! getName ( ) . equals ( newType . getName ( ) ) || ! getClass ( ) . getName ( ) . equals ( newType . getClass ( ) . getName ( ) ) ) { throw new TypeUpdateException ( newType ) ; }
public class HttpInputStream { /** * Fills input buffer with more bytes . */ protected void fill ( ) throws IOException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { // 306998.15 logger . logp ( Level . FINE , CLASS_NAME , "fill" , "fill" ) ; } // PK79219 Start long longLeft = limit - total ; int len ; if ( longLeft > Integer . MAX_VALUE ) len = buf . length ; else len = Math . min ( buf . length , ( int ) longLeft ) ; // PK79219 End if ( len > 0 ) { len = in . read ( buf , 0 , len ) ; if ( len > 0 ) { pos = 0 ; count = len ; } }
public class OnChangeSubscriptionQos { /** * internal method required to prevent findbugs warning */ private OnChangeSubscriptionQos setMinIntervalMsInternal ( final long minIntervalMs ) { } }
if ( minIntervalMs < MIN_MIN_INTERVAL_MS ) { this . minIntervalMs = MIN_MIN_INTERVAL_MS ; logger . warn ( "minIntervalMs < MIN_MIN_INTERVAL_MS. Using MIN_MIN_INTERVAL_MS: {}" , MIN_MIN_INTERVAL_MS ) ; } else if ( minIntervalMs > MAX_MIN_INTERVAL_MS ) { this . minIntervalMs = MAX_MIN_INTERVAL_MS ; logger . warn ( "minIntervalMs > MAX_MIN_INTERVAL_MS. Using MAX_MIN_INTERVAL_MS: {}" , MAX_MIN_INTERVAL_MS ) ; } else { this . minIntervalMs = minIntervalMs ; } return this ;
public class Wxs { /** * 将一个WxOutMsg转为主动信息所需要的Json文本 * @ param msg * 微信消息输出对象 * @ return 输出的 JSON 文本 */ public static void asJson ( Writer writer , WxOutMsg msg ) { } }
NutMap map = new NutMap ( ) ; map . put ( "touser" , msg . getToUserName ( ) ) ; map . put ( "msgtype" , msg . getMsgType ( ) ) ; switch ( WxMsgType . valueOf ( msg . getMsgType ( ) ) ) { case text : map . put ( "text" , new NutMap ( ) . setv ( "content" , msg . getContent ( ) ) ) ; break ; case image : map . put ( "image" , new NutMap ( ) . setv ( "media_id" , msg . getImage ( ) . getMediaId ( ) ) ) ; break ; case voice : map . put ( "voice" , new NutMap ( ) . setv ( "media_id" , msg . getVoice ( ) . getMediaId ( ) ) ) ; break ; case video : NutMap _video = new NutMap ( ) ; _video . setv ( "media_id" , msg . getVideo ( ) . getMediaId ( ) ) ; if ( msg . getVideo ( ) . getTitle ( ) != null ) _video . put ( "title" , ( msg . getVideo ( ) . getTitle ( ) ) ) ; if ( msg . getVideo ( ) . getDescription ( ) != null ) _video . put ( "description" , ( msg . getVideo ( ) . getDescription ( ) ) ) ; map . put ( "video" , _video ) ; break ; case music : NutMap _music = new NutMap ( ) ; WxMusic music = msg . getMusic ( ) ; if ( music . getTitle ( ) != null ) _music . put ( "title" , ( music . getTitle ( ) ) ) ; if ( music . getDescription ( ) != null ) _music . put ( "description" , ( music . getDescription ( ) ) ) ; if ( music . getMusicUrl ( ) != null ) _music . put ( "musicurl" , ( music . getMusicUrl ( ) ) ) ; if ( music . getHQMusicUrl ( ) != null ) _music . put ( "hqmusicurl" , ( music . getHQMusicUrl ( ) ) ) ; _music . put ( "thumb_media_id" , ( music . getThumbMediaId ( ) ) ) ; break ; case news : NutMap _news = new NutMap ( ) ; List < NutMap > list = new ArrayList < NutMap > ( ) ; for ( WxArticle article : msg . getArticles ( ) ) { NutMap item = new NutMap ( ) ; if ( article . getTitle ( ) != null ) item . put ( "title" , ( article . getTitle ( ) ) ) ; if ( article . getDescription ( ) != null ) item . put ( "description" , ( article . getDescription ( ) ) ) ; if ( article . getPicUrl ( ) != null ) item . put ( "picurl" , ( article . getPicUrl ( ) ) ) ; if ( article . 
getUrl ( ) != null ) item . put ( "url" , ( article . getUrl ( ) ) ) ; list . add ( item ) ; } _news . put ( "articles" , list ) ; map . put ( "news" , _news ) ; break ; case mpnews : map . put ( "mpnews" , new NutMap ( ) . setv ( "media_id" , msg . getMedia_id ( ) ) ) ; break ; case wxcard : map . put ( "wxcard" , new NutMap ( ) . setv ( "card_id" , msg . getCard ( ) . getId ( ) ) . setv ( "card_ext" , msg . getCard ( ) . getExt ( ) ) ) ; break ; default : break ; } Json . toJson ( writer , map ) ;
public class DescribeDirectoriesRequest { /** * A list of identifiers of the directories for which to obtain the information . If this member is null , all * directories that belong to the current account are returned . * An empty list results in an < code > InvalidParameterException < / code > being thrown . * @ return A list of identifiers of the directories for which to obtain the information . If this member is null , all * directories that belong to the current account are returned . < / p > * An empty list results in an < code > InvalidParameterException < / code > being thrown . */ public java . util . List < String > getDirectoryIds ( ) { } }
if ( directoryIds == null ) { directoryIds = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return directoryIds ;
public class GenListModuleReader { /** * Get all targets except copy - to . * @ return set of target file path with option format after * { @ link org . dita . dost . util . Constants # STICK STICK } */ public Set < Reference > getNonCopytoResult ( ) { } }
final Set < Reference > nonCopytoSet = new LinkedHashSet < > ( 128 ) ; nonCopytoSet . addAll ( nonConrefCopytoTargets ) ; for ( final URI f : conrefTargets ) { nonCopytoSet . add ( new Reference ( stripFragment ( f ) , currentFileFormat ( ) ) ) ; } for ( final URI f : copytoMap . values ( ) ) { nonCopytoSet . add ( new Reference ( stripFragment ( f ) ) ) ; } for ( final URI f : ignoredCopytoSourceSet ) { nonCopytoSet . add ( new Reference ( stripFragment ( f ) ) ) ; } for ( final URI filename : coderefTargetSet ) { nonCopytoSet . add ( new Reference ( stripFragment ( filename ) ) ) ; } return nonCopytoSet ;
public class IncomeOperand { /** * Gets the tier value for this IncomeOperand . * @ return tier * Income tier specifying an income bracket that a household falls * under . Tier 1 belongs to the * highest income bracket . * < span class = " constraint Required " > This field is * required and should not be { @ code null } . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . IncomeTier getTier ( ) { } }
return tier ;
public class ClassPathUtils { /** * Find the root path for the given resource . If the resource is found in a Jar file , then the * result will be an absolute path to the jar file . If the resource is found in a directory , * then the result will be the parent path of the given resource . * For example , if the resourceName is given as " scripts / myscript . groovy " , and the path to the file is * " / root / sub1 / script / myscript . groovy " , then this method will return " / root / sub1" * @ param resourceName relative path of the resource to search for . E . G . " scripts / myscript . groovy " * @ param classLoader the { @ link ClassLoader } to search * @ return absolute path of the root of the resource . */ @ Nullable public static Path findRootPathForResource ( String resourceName , ClassLoader classLoader ) { } }
Objects . requireNonNull ( resourceName , "resourceName" ) ; Objects . requireNonNull ( classLoader , "classLoader" ) ; URL resource = classLoader . getResource ( resourceName ) ; if ( resource != null ) { String protocol = resource . getProtocol ( ) ; if ( protocol . equals ( "jar" ) ) { return getJarPathFromUrl ( resource ) ; } else if ( protocol . equals ( "file" ) ) { return getRootPathFromDirectory ( resourceName , resource ) ; } else { throw new IllegalStateException ( "Unsupported URL protocol: " + protocol ) ; } } return null ;
public class Column { /** * Decodes the column ' s { @ code byte [ ] } buffer . * A { @ code null } is returned if the column is nullable and the * { @ code byte [ ] } buffer equals { @ link Column # getNullValue ( ) nullValue } . * @ param buffer { @ code byte [ ] } of size { @ link # getSize ( ) } * @ return { @ code < T > } * @ throws InvalidArgument Thrown if { @ code buffer } is null or its length is * not equal to { @ link # getSize ( ) } */ public T decode ( byte [ ] buffer ) { } }
if ( buffer == null ) { throw new InvalidArgument ( "buffer" , buffer ) ; } else if ( buffer . length != size ) { final String fmt = "cannot decode %s bytes, expected %d" ; final String msg = format ( fmt , buffer . length , size ) ; throw new InvalidArgument ( msg ) ; } if ( nullable ) { final byte [ ] nullValue = getNullValue ( ) ; if ( Arrays . equals ( buffer , nullValue ) ) { return null ; } } return decodeData ( buffer ) ;
public class BinaryJedis { /** * Return a subset of the string from offset start to offset end ( both offsets are inclusive ) . * Negative offsets can be used in order to provide an offset starting from the end of the string . * So - 1 means the last char , - 2 the penultimate and so forth . * The function handles out of range requests without raising an error , but just limiting the * resulting range to the actual length of the string . * Time complexity : O ( start + n ) ( with start being the start index and n the total length of the * requested range ) . Note that the lookup part of this command is O ( 1 ) so for small strings this * is actually an O ( 1 ) command . * @ param key * @ param start * @ param end * @ return Bulk reply */ @ Override public byte [ ] substr ( final byte [ ] key , final int start , final int end ) { } }
checkIsInMultiOrPipeline ( ) ; client . substr ( key , start , end ) ; return client . getBinaryBulkReply ( ) ;
public class ConstructorInstrumenter { /** * Ensures that the given sampler will be invoked every time a constructor for class c is invoked . * @ param c The class to be tracked * @ param sampler the code to be invoked when an instance of c is constructed * @ throws UnmodifiableClassException if c cannot be modified . */ public static void instrumentClass ( Class < ? > c , ConstructorCallback < ? > sampler ) throws UnmodifiableClassException { } }
// IMPORTANT : Don ' t forget that other threads may be accessing this // class while this code is running . Specifically , the class may be // executed directly after the retransformClasses is called . Thus , we need // to be careful about what happens after the retransformClasses call . synchronized ( samplerPutAtomicityLock ) { List < ConstructorCallback < ? > > list = samplerMap . get ( c ) ; if ( list == null ) { CopyOnWriteArrayList < ConstructorCallback < ? > > samplerList = new CopyOnWriteArrayList < ConstructorCallback < ? > > ( ) ; samplerList . add ( sampler ) ; samplerMap . put ( c , samplerList ) ; Instrumentation inst = AllocationRecorder . getInstrumentation ( ) ; Class < ? > [ ] cs = new Class < ? > [ 1 ] ; cs [ 0 ] = c ; inst . retransformClasses ( c ) ; } else { list . add ( sampler ) ; } }
public class Descriptor { /** * Filename of the form " < ksname > - < cfname > - [ tmp - ] [ < version > - ] < gen > - < component > " * @ param directory The directory of the SSTable files * @ param name The name of the SSTable file * @ param skipComponent true if the name param should not be parsed for a component tag * @ return A Descriptor for the SSTable , and the Component remainder . */ public static Pair < Descriptor , String > fromFilename ( File directory , String name , boolean skipComponent ) { } }
// tokenize the filename StringTokenizer st = new StringTokenizer ( name , String . valueOf ( separator ) ) ; String nexttok ; // all filenames must start with keyspace and column family String ksname = st . nextToken ( ) ; String cfname = st . nextToken ( ) ; // optional temporary marker nexttok = st . nextToken ( ) ; Type type = Type . FINAL ; if ( nexttok . equals ( Type . TEMP . marker ) ) { type = Type . TEMP ; nexttok = st . nextToken ( ) ; } else if ( nexttok . equals ( Type . TEMPLINK . marker ) ) { type = Type . TEMPLINK ; nexttok = st . nextToken ( ) ; } if ( ! Version . validate ( nexttok ) ) throw new UnsupportedOperationException ( "SSTable " + name + " is too old to open. Upgrade to 2.0 first, and run upgradesstables" ) ; Version version = new Version ( nexttok ) ; nexttok = st . nextToken ( ) ; int generation = Integer . parseInt ( nexttok ) ; // component suffix String component = null ; if ( ! skipComponent ) component = st . nextToken ( ) ; directory = directory != null ? directory : new File ( "." ) ; return Pair . create ( new Descriptor ( version , directory , ksname , cfname , generation , type ) , component ) ;
public class GetBundlesResult { /** * An array of key - value pairs that contains information about the available bundles . * @ param bundles * An array of key - value pairs that contains information about the available bundles . */ public void setBundles ( java . util . Collection < Bundle > bundles ) { } }
if ( bundles == null ) { this . bundles = null ; return ; } this . bundles = new java . util . ArrayList < Bundle > ( bundles ) ;
public class EditText { /** * @ return the minimum height of this TextView expressed in pixels , or - 1 if the minimum * height was set in number of lines instead using { @ link # setMinLines ( int ) or # setLines ( int ) } . * @ see # setMinHeight ( int ) * @ attr ref android . R . styleable # TextView _ minHeight */ @ TargetApi ( Build . VERSION_CODES . JELLY_BEAN ) public int getMinHeight ( ) { } }
if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . JELLY_BEAN ) return mInputView . getMinHeight ( ) ; return - 1 ;
public class CmsSitemapView { /** * Performs necessary async actions before actually setting the mode . < p > * @ param mode the mode */ public void onBeforeSetEditorMode ( final EditorMode mode ) { } }
EditorMode oldMode = m_editorMode ; if ( ( oldMode == EditorMode . categories ) && ( ( mode == EditorMode . vfs ) || ( mode == EditorMode . navigation ) ) ) { final CmsRpcAction < Void > action = new CmsRpcAction < Void > ( ) { @ Override public void execute ( ) { start ( 200 , true ) ; getController ( ) . refreshRoot ( this ) ; } @ Override protected void onResponse ( Void result ) { stop ( false ) ; setEditorMode ( mode ) ; } } ; action . execute ( ) ; } else { setEditorMode ( mode ) ; }
public class BidiOrder { /** * Set resultLevels from start up to ( but not including ) limit to newLevel . */ private void setLevels ( int start , int limit , byte newLevel ) { } }
for ( int i = start ; i < limit ; ++ i ) { resultLevels [ i ] = newLevel ; }
public class CostlessMeldPairingHeap { /** * Cut the oldest child of a node . * @ param n * the node * @ return the oldest child of a node or null */ private Node < K , V > cutOldestChild ( Node < K , V > n ) { } }
Node < K , V > oldestChild = n . o_c ; if ( oldestChild != null ) { if ( oldestChild . y_s != null ) { oldestChild . y_s . o_s = n ; } n . o_c = oldestChild . y_s ; oldestChild . y_s = null ; oldestChild . o_s = null ; } return oldestChild ;
public class RestoreArgs { /** * Set TTL in { @ code milliseconds } after restoring the key . * @ param ttl time to live . * @ return { @ code this } . */ public RestoreArgs ttl ( Duration ttl ) { } }
LettuceAssert . notNull ( ttl , "Time to live must not be null" ) ; return ttl ( ttl . toMillis ( ) ) ;
public class BigtableSession { /** * Snapshot operations need various aspects of a { @ link BigtableClusterName } . This method gets a * clusterId from either a lookup ( projectId and instanceId translate to a single clusterId when * an instance has only one cluster ) . */ public synchronized BigtableClusterName getClusterName ( ) throws IOException { } }
if ( this . clusterName == null ) { try ( BigtableClusterUtilities util = new BigtableClusterUtilities ( options ) ) { ListClustersResponse clusters = util . getClusters ( ) ; Preconditions . checkState ( clusters . getClustersCount ( ) == 1 , String . format ( "Project '%s' / Instance '%s' has %d clusters. There must be exactly 1 for this operation to work." , options . getProjectId ( ) , options . getInstanceId ( ) , clusters . getClustersCount ( ) ) ) ; clusterName = new BigtableClusterName ( clusters . getClusters ( 0 ) . getName ( ) ) ; } catch ( GeneralSecurityException e ) { throw new IOException ( "Could not get cluster Id." , e ) ; } } return clusterName ;
public class RythmEngine { /** * Prepare the render operation environment settings * @ param codeType * @ param locale * @ param usrCtx * @ return the engine instance itself */ public final RythmEngine prepare ( ICodeType codeType , Locale locale , Map < String , Object > usrCtx ) { } }
renderSettings . init ( codeType ) . init ( locale ) . init ( usrCtx ) ; return this ;
public class LWJGFont { /** * Draws the paragraph given by the specified string , using this font instance ' s current color . < br > * if the specified string protrudes from paragraphWidth , protruded substring is auto wrapped with left align . < br > * Note that the specified destination coordinates is a left point of the rendered string ' s baseline . * @ param text the string to be drawn . * @ param dstX the x coordinate to render the string . * @ param dstY the y coordinate to render the string . * @ param dstZ the z coordinate to render the string . * @ param paragraphWidth the max width to draw the paragraph . * @ throws IOException Indicates a failure to read font images as textures . */ public final void drawParagraph ( String text , float dstX , float dstY , float dstZ , float paragraphWidth ) throws IOException { } }
this . drawParagraph ( text , dstX , dstY , dstZ , paragraphWidth , ALIGN . LEGT ) ;
public class ResolveFileAction { /** * { @ inheritDoc } */ @ Override public void onBaseline ( Collection < File > baseline ) { } }
onChange ( baseline , Collections . < File > emptyList ( ) , Collections . < File > emptyList ( ) ) ;
public class LazyMultiLoaderWithInclude { /** * Loads the specified ids . */ @ Override public < TResult > Lazy < Map < String , TResult > > load ( Class < TResult > clazz , Collection < String > ids ) { } }
return _session . lazyLoadInternal ( clazz , ids . toArray ( new String [ 0 ] ) , _includes . toArray ( new String [ 0 ] ) , null ) ;
public class DefaultQueueManager { /** * key对应的队里产生了处理任务 * @ param key * @ param handleTask */ protected void onQueueHandleTask ( String key , Runnable handleTask ) { } }
KeyGroupEventListener tmp = listener ; if ( tmp != null ) { // 任务抛给监听者 tmp . onQueueHandleTask ( key , handleTask ) ; }
public class ThriftCLIServiceClient { /** * / * ( non - Javadoc ) * @ see org . apache . hive . service . cli . ICLIService # getColumns ( org . apache . hive . service . cli . SessionHandle ) */ @ Override public OperationHandle getColumns ( SessionHandle sessionHandle , String catalogName , String schemaName , String tableName , String columnName ) throws HiveSQLException { } }
try { TGetColumnsReq req = new TGetColumnsReq ( ) ; req . setSessionHandle ( sessionHandle . toTSessionHandle ( ) ) ; req . setCatalogName ( catalogName ) ; req . setSchemaName ( schemaName ) ; req . setTableName ( tableName ) ; req . setColumnName ( columnName ) ; TGetColumnsResp resp = cliService . GetColumns ( req ) ; checkStatus ( resp . getStatus ( ) ) ; TProtocolVersion protocol = sessionHandle . getProtocolVersion ( ) ; return new OperationHandle ( resp . getOperationHandle ( ) , protocol ) ; } catch ( HiveSQLException e ) { throw e ; } catch ( Exception e ) { throw new HiveSQLException ( e ) ; }
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIRD ( ) { } }
if ( irdEClass == null ) { irdEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 283 ) ; } return irdEClass ;
public class PropertyDescriptor { /** * Returns true if this property type is applicable to the * given target type . * The default implementation of this method checks if the given node type is assignable * according to the parameterization , but subtypes can extend this to change this behavior . * @ return * true to indicate applicable , in which case the property will be * displayed in the configuration screen of the target , for example . */ public boolean isApplicable ( Class < ? extends T > targetType ) { } }
Class < ? extends T > applicable = Functions . getTypeParameter ( clazz , getP ( ) , 0 ) ; return applicable . isAssignableFrom ( targetType ) ;
public class SortableBehavior { /** * Moves the sorting element or helper so the cursor always appears to drag from the * same position . Coordinates can be given as a hash using a combination of one or two * keys : { top , left , right , bottom } * @ param cusorAt * @ return instance of the current behavior */ public SortableBehavior setCursorAt ( CursorAtEnum cusorAt ) { } }
this . options . putLiteral ( "cursorAt" , cusorAt . toString ( ) . toLowerCase ( ) . replace ( '_' , ' ' ) ) ; return this ;
public class Session { /** * Reads the idToRequest map from a JSON stream * @ param coronaSerializer The CoronaSerializer instance to be used to * read the JSON * @ throws IOException */ private void readIdToRequest ( CoronaSerializer coronaSerializer ) throws IOException { } }
coronaSerializer . readField ( "idToRequest" ) ; // Expecting the START _ OBJECT token for idToRequest coronaSerializer . readStartObjectToken ( "idToRequest" ) ; JsonToken current = coronaSerializer . nextToken ( ) ; while ( current != JsonToken . END_OBJECT ) { Integer id = Integer . parseInt ( coronaSerializer . getFieldName ( ) ) ; idToRequest . put ( id , new ResourceRequestInfo ( coronaSerializer ) ) ; current = coronaSerializer . nextToken ( ) ; } // Done with reading the END _ OBJECT token for idToRequest
public class UpfrontAllocatingPageSource { /** * Frees the supplied buffer . * If the given buffer was not allocated by this source or has already been * freed then an { @ code AssertionError } is thrown . */ @ Override public synchronized void free ( Page page ) { } }
if ( page . isFreeable ( ) ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Freeing a {}B buffer from chunk {} &{}" , DebuggingUtils . toBase2SuffixedString ( page . size ( ) ) , page . index ( ) , page . address ( ) ) ; } markAllAvailable ( ) ; sliceAllocators . get ( page . index ( ) ) . free ( page . address ( ) , page . size ( ) ) ; victims . get ( page . index ( ) ) . remove ( page ) ; victimAllocators . get ( page . index ( ) ) . tryFree ( page . address ( ) , page . size ( ) ) ; if ( ! fallingThresholds . isEmpty ( ) ) { long allocated = getAllocatedSize ( ) ; fireThresholds ( allocated + page . size ( ) , allocated ) ; } }
public class LabAccountsInner { /** * List lab accounts in a subscription . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; LabAccountInner & gt ; object */ public Observable < Page < LabAccountInner > > listAsync ( ) { } }
return listWithServiceResponseAsync ( ) . map ( new Func1 < ServiceResponse < Page < LabAccountInner > > , Page < LabAccountInner > > ( ) { @ Override public Page < LabAccountInner > call ( ServiceResponse < Page < LabAccountInner > > response ) { return response . body ( ) ; } } ) ;
public class Object2ObjectHashMap { /** * { @ inheritDoc } */ public V remove ( final Object key ) { } }
final Object [ ] entries = this . entries ; final int mask = entries . length - 1 ; int keyIndex = Hashing . evenHash ( key . hashCode ( ) , mask ) ; Object oldValue = null ; while ( entries [ keyIndex + 1 ] != null ) { if ( entries [ keyIndex ] == key || entries [ keyIndex ] . equals ( key ) ) { oldValue = entries [ keyIndex + 1 ] ; entries [ keyIndex ] = null ; entries [ keyIndex + 1 ] = null ; size -- ; compactChain ( keyIndex ) ; break ; } keyIndex = next ( keyIndex , mask ) ; } return unmapNullValue ( oldValue ) ;
public class CustomRuleMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CustomRule customRule , ProtocolMarshaller protocolMarshaller ) { } }
if ( customRule == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( customRule . getSource ( ) , SOURCE_BINDING ) ; protocolMarshaller . marshall ( customRule . getTarget ( ) , TARGET_BINDING ) ; protocolMarshaller . marshall ( customRule . getStatus ( ) , STATUS_BINDING ) ; protocolMarshaller . marshall ( customRule . getCondition ( ) , CONDITION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DescribeSigningJobResult { /** * Map of user - assigned key - value pairs used during signing . These values contain any information that you specified * for use in your signing job . * @ param signingParameters * Map of user - assigned key - value pairs used during signing . These values contain any information that you * specified for use in your signing job . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeSigningJobResult withSigningParameters ( java . util . Map < String , String > signingParameters ) { } }
setSigningParameters ( signingParameters ) ; return this ;
public class XMLChecker { /** * Checks if the specified string matches the < em > PubidLiteral < / em > production . * See : < a href = " http : / / www . w3 . org / TR / REC - xml # NT - PubidLiteral " > Definition of PubidLiteral < / a > . * @ param s the character string to check , cannot be < code > null < / code > . * @ throws NullPointerException if < code > s = = null < / code > . * @ throws InvalidXMLException if the specified character string does not match the * < em > PubidLiteral < / em > production . */ public static final void checkPubidLiteral ( String s ) throws NullPointerException , InvalidXMLException { } }
checkPubidLiteral ( s . toCharArray ( ) , 0 , s . length ( ) ) ;
public class HeapUpdateDoublesSketch { /** * Obtains a new on - heap instance of a DoublesSketch . * @ param k Parameter that controls space usage of sketch and accuracy of estimates . * Must be greater than 1 and less than 65536 and a power of 2. * @ return a HeapUpdateDoublesSketch */ static HeapUpdateDoublesSketch newInstance ( final int k ) { } }
final HeapUpdateDoublesSketch hqs = new HeapUpdateDoublesSketch ( k ) ; final int baseBufAlloc = 2 * Math . min ( DoublesSketch . MIN_K , k ) ; // the min is important hqs . n_ = 0 ; hqs . combinedBuffer_ = new double [ baseBufAlloc ] ; hqs . baseBufferCount_ = 0 ; hqs . bitPattern_ = 0 ; hqs . minValue_ = Double . NaN ; hqs . maxValue_ = Double . NaN ; return hqs ;
public class StoringLocalLogs { /** * Add a new log entry to the local storage . * @ param logType the log type to store * @ param entry the entry to store */ @ Override public void addEntry ( String logType , LogEntry entry ) { } }
if ( ! logTypesToInclude . contains ( logType ) ) { return ; } if ( ! localLogs . containsKey ( logType ) ) { List < LogEntry > entries = new ArrayList < > ( ) ; entries . add ( entry ) ; localLogs . put ( logType , entries ) ; } else { localLogs . get ( logType ) . add ( entry ) ; }
public class Version { /** * Compares this version to the other version . * This method does not take into account the versions ' build * metadata . If you want to compare the versions ' build metadata * use the { @ code Version . compareWithBuildsTo } method or the * { @ code Version . BUILD _ AWARE _ ORDER } comparator . * @ param other the other version to compare to * @ return a negative integer , zero or a positive integer if this version * is less than , equal to or greater the the specified version * @ see # BUILD _ AWARE _ ORDER * @ see # compareWithBuildsTo ( Version other ) */ @ Override public int compareTo ( Version other ) { } }
int result = normal . compareTo ( other . normal ) ; if ( result == 0 ) { result = preRelease . compareTo ( other . preRelease ) ; } return result ;
public class StaticLog {
/**
 * Error-level logging.<br>
 * The Log instance is resolved dynamically on each call, which is relatively
 * slow - prefer a cached logger for frequently executed code paths.
 *
 * @param e the throwable whose stack trace should be included in the log
 * @param format the message template, with {} marking each placeholder
 * @param arguments the values substituted for the placeholders
 */
public static void error ( Throwable e , String format , Object ... arguments ) { } }
// Resolve the caller's logger indirectly, then delegate to the Log-based overload.
error ( LogFactory . indirectGet ( ) , e , format , arguments ) ;
public class ServiceHandler {
/**
 * Get TopologyInfo; it contains all running data of one topology:
 * summary, component summaries, per-task summaries and cached metrics.
 *
 * @return TopologyInfo
 * @throws NotAliveException when no StormBase or Assignment exists for the id
 * @throws TException on any other failure (wrapped if necessary)
 */
@ Override public TopologyInfo getTopologyInfo ( String topologyId ) throws TException { } }
// Timed via the finally block so the nimbus histogram sees every exit path.
long start = System . nanoTime ( ) ;
StormClusterState stormClusterState = data . getStormClusterState ( ) ;
try {
// get topology ' s StormBase
StormBase base = stormClusterState . storm_base ( topologyId , null ) ;
if ( base == null ) { throw new NotAliveException ( "No topology of " + topologyId ) ; }
Assignment assignment = stormClusterState . assignment_info ( topologyId , null ) ;
if ( assignment == null ) { throw new NotAliveException ( "No topology of " + topologyId ) ; }
// Heartbeats may not exist yet for a freshly launched topology; null means "still starting".
TopologyTaskHbInfo topologyTaskHbInfo = data . getTasksHeartbeat ( ) . get ( topologyId ) ;
Map < Integer , TaskHeartbeat > taskHbMap = null ;
if ( topologyTaskHbInfo != null ) taskHbMap = topologyTaskHbInfo . get_taskHbs ( ) ;
Map < Integer , TaskInfo > taskInfoMap = Cluster . get_all_taskInfo ( stormClusterState , topologyId ) ;
Map < Integer , String > taskToComponent = Cluster . get_all_task_component ( stormClusterState , topologyId , taskInfoMap ) ;
Map < Integer , String > taskToType = Cluster . get_all_task_type ( stormClusterState , topologyId , taskInfoMap ) ;
// "Y" / "" is the legacy flag consumed by the UI to indicate any task error.
String errorString ;
if ( Cluster . is_topology_exist_error ( stormClusterState , topologyId ) ) { errorString = "Y" ; } else { errorString = "" ; }
// Top-level summary of the topology.
TopologySummary topologySummary = new TopologySummary ( ) ;
topologySummary . set_id ( topologyId ) ; topologySummary . set_name ( base . getStormName ( ) ) ; topologySummary . set_uptimeSecs ( TimeUtils . time_delta ( base . getLanchTimeSecs ( ) ) ) ; topologySummary . set_status ( base . getStatusString ( ) ) ; topologySummary . set_numTasks ( NimbusUtils . getTopologyTaskNum ( assignment ) ) ; topologySummary . set_numWorkers ( assignment . getWorkers ( ) . size ( ) ) ; topologySummary . set_errorInfo ( errorString ) ;
// One ComponentSummary per component, built from the inverted task->component map.
Map < String , ComponentSummary > componentSummaryMap = new HashMap < > ( ) ;
HashMap < String , List < Integer > > componentToTasks = JStormUtils . reverse_map ( taskToComponent ) ;
for ( Entry < String , List < Integer > > entry : componentToTasks . entrySet ( ) ) { String name = entry . getKey ( ) ; List < Integer > taskIds = entry . getValue ( ) ; if ( taskIds == null || taskIds . size ( ) == 0 ) { LOG . warn ( "No task of component " + name ) ; continue ; } ComponentSummary componentSummary = new ComponentSummary ( ) ; componentSummaryMap . put ( name , componentSummary ) ; componentSummary . set_name ( name ) ; componentSummary . set_type ( taskToType . get ( taskIds . get ( 0 ) ) ) ; componentSummary . set_parallel ( taskIds . size ( ) ) ; componentSummary . set_taskIds ( taskIds ) ; }
// One TaskSummary per task (TreeMap keeps them ordered by task id).
Map < Integer , TaskSummary > taskSummaryMap = new TreeMap < > ( ) ;
Map < Integer , List < TaskError > > taskErrors = Cluster . get_all_task_errors ( stormClusterState , topologyId ) ;
for ( Integer taskId : taskInfoMap . keySet ( ) ) { TaskSummary taskSummary = new TaskSummary ( ) ; taskSummaryMap . put ( taskId , taskSummary ) ; taskSummary . set_taskId ( taskId ) ;
// Missing heartbeat data (whole map or this task) means the task is still starting.
if ( taskHbMap == null ) { taskSummary . set_status ( "Starting" ) ; taskSummary . set_uptime ( 0 ) ; } else { TaskHeartbeat hb = taskHbMap . get ( taskId ) ; if ( hb == null ) { taskSummary . set_status ( "Starting" ) ; taskSummary . set_uptime ( 0 ) ; } else { boolean isInactive = NimbusUtils . isTaskDead ( data , topologyId , taskId ) ; if ( isInactive ) taskSummary . set_status ( "INACTIVE" ) ; else taskSummary . set_status ( "ACTIVE" ) ; taskSummary . set_uptime ( hb . get_uptime ( ) ) ; } }
// Only attach error details when the topology-level error flag is set.
if ( StringUtils . isBlank ( errorString ) ) { continue ; }
List < TaskError > taskErrorList = taskErrors . get ( taskId ) ;
if ( taskErrorList != null && taskErrorList . size ( ) != 0 ) { for ( TaskError taskError : taskErrorList ) { ErrorInfo errorInfo = new ErrorInfo ( taskError . getError ( ) , taskError . getTimSecs ( ) , taskError . getLevel ( ) , taskError . getCode ( ) ) ; taskSummary . add_to_errors ( errorInfo ) ; String component = taskToComponent . get ( taskId ) ; componentSummaryMap . get ( component ) . add_to_errors ( errorInfo ) ; } } }
// Fill in host/port from the worker assignment for every task.
for ( ResourceWorkerSlot workerSlot : assignment . getWorkers ( ) ) { String hostname = workerSlot . getHostname ( ) ; int port = workerSlot . getPort ( ) ; for ( Integer taskId : workerSlot . getTasks ( ) ) { TaskSummary taskSummary = taskSummaryMap . get ( taskId ) ; taskSummary . set_host ( hostname ) ; taskSummary . set_port ( port ) ; } }
TopologyInfo topologyInfo = new TopologyInfo ( ) ;
topologyInfo . set_topology ( topologySummary ) ; topologyInfo . set_components ( JStormUtils . mk_list ( componentSummaryMap . values ( ) ) ) ; topologyInfo . set_tasks ( JStormUtils . mk_list ( taskSummaryMap . values ( ) ) ) ;
// return topology metric & component metric only
List < MetricInfo > tpMetricList = data . getMetricCache ( ) . getMetricData ( topologyId , MetaType . TOPOLOGY ) ;
List < MetricInfo > compMetricList = data . getMetricCache ( ) . getMetricData ( topologyId , MetaType . COMPONENT ) ;
List < MetricInfo > workerMetricList = data . getMetricCache ( ) . getMetricData ( topologyId , MetaType . WORKER ) ;
List < MetricInfo > compStreamMetricList = data . getMetricCache ( ) . getMetricData ( topologyId , MetaType . COMPONENT_STREAM ) ;
// Task / stream / netty metrics are intentionally returned empty here.
MetricInfo taskMetric = MetricUtils . mkMetricInfo ( ) ;
MetricInfo streamMetric = MetricUtils . mkMetricInfo ( ) ;
MetricInfo nettyMetric = MetricUtils . mkMetricInfo ( ) ;
MetricInfo tpMetric , compMetric , compStreamMetric , workerMetric ;
if ( tpMetricList == null || tpMetricList . size ( ) == 0 ) { tpMetric = MetricUtils . mkMetricInfo ( ) ; } else {
// get the last min topology metric
tpMetric = tpMetricList . get ( tpMetricList . size ( ) - 1 ) ; }
if ( compMetricList == null || compMetricList . size ( ) == 0 ) { compMetric = MetricUtils . mkMetricInfo ( ) ; } else { compMetric = compMetricList . get ( 0 ) ; }
if ( compStreamMetricList == null || compStreamMetricList . size ( ) == 0 ) { compStreamMetric = MetricUtils . mkMetricInfo ( ) ; } else { compStreamMetric = compStreamMetricList . get ( 0 ) ; }
if ( workerMetricList == null || workerMetricList . size ( ) == 0 ) { workerMetric = MetricUtils . mkMetricInfo ( ) ; } else { workerMetric = workerMetricList . get ( 0 ) ; }
TopologyMetric topologyMetrics = new TopologyMetric ( tpMetric , compMetric , workerMetric , taskMetric , streamMetric , nettyMetric ) ;
topologyMetrics . set_compStreamMetric ( compStreamMetric ) ;
topologyInfo . set_metrics ( topologyMetrics ) ;
return topologyInfo ;
} catch ( TException e ) { LOG . info ( "Failed to get topologyInfo " + topologyId , e ) ; throw e ; }
// Non-thrift failures are wrapped so the RPC layer still sees a TException.
catch ( Exception e ) { LOG . info ( "Failed to get topologyInfo " + topologyId , e ) ; throw new TException ( "Failed to get topologyInfo" + topologyId ) ; }
finally { long end = System . nanoTime ( ) ; SimpleJStormMetric . updateNimbusHistogram ( "getTopologyInfo" , ( end - start ) / TimeUtils . NS_PER_US ) ; }
public class FixedLengthDecodingState { /** * { @ inheritDoc } */ @ Override public DecodingState finishDecode ( ProtocolDecoderOutput out ) throws Exception { } }
IoBufferEx readData ; if ( buffer == null ) { readData = allocator . wrap ( allocator . allocate ( 0 ) ) ; } else { buffer . flip ( ) ; readData = buffer ; buffer = null ; } return finishDecode ( ( IoBuffer ) readData , out ) ;
public class AnnotationUtils { /** * Get the annotation metadata for the given element . * @ param element The element * @ return The { @ link AnnotationMetadata } */ public AnnotationMetadata getAnnotationMetadata ( Element element ) { } }
AnnotationMetadata metadata = annotationMetadataCache . get ( element ) ; if ( metadata == null ) { metadata = newAnnotationBuilder ( ) . build ( element ) ; annotationMetadataCache . put ( element , metadata ) ; } return metadata ;
public class CoFeedbackTransformation { /** * Adds a feedback edge . The parallelism of the { @ code StreamTransformation } must match * the parallelism of the input { @ code StreamTransformation } of the upstream * { @ code StreamTransformation } . * @ param transform The new feedback { @ code StreamTransformation } . */ public void addFeedbackEdge ( StreamTransformation < F > transform ) { } }
if ( transform . getParallelism ( ) != this . getParallelism ( ) ) { throw new UnsupportedOperationException ( "Parallelism of the feedback stream must match the parallelism of the original" + " stream. Parallelism of original stream: " + this . getParallelism ( ) + "; parallelism of feedback stream: " + transform . getParallelism ( ) ) ; } feedbackEdges . add ( transform ) ;
public class GroovyScriptEngine { /** * Initialize a new GroovyClassLoader with a default or * constructor - supplied parentClassLoader . * @ return the parent classloader used to load scripts */ private GroovyClassLoader initGroovyLoader ( ) { } }
return ( GroovyClassLoader ) AccessController . doPrivileged ( new PrivilegedAction ( ) { public Object run ( ) { if ( parentLoader instanceof GroovyClassLoader ) { return new ScriptClassLoader ( ( GroovyClassLoader ) parentLoader ) ; } else { return new ScriptClassLoader ( parentLoader , config ) ; } } } ) ;
public class GBSInsertFringe { /** * Balance a fringe following the addition of its final node . * @ param kFactor The K factor for the tree . * @ param stack The stack of nodes through which the insert operation * passed . * @ param fpoint The fringe balance point ( the top of the fringe ) . * @ param fpidx The index within the stack of fpoint * @ param maxBal Maximum allowed fringe imbalance . This is derived * from the K factor but has to be computed by the * caller anyway so it is passed along here . */ void balance ( int kFactor , NodeStack stack , GBSNode fpoint , int fpidx , int maxBal ) { } }
/* Get parent of balance point */ GBSNode bparent = stack . node ( fpidx - 1 ) ; switch ( kFactor ) { case 2 : balance2 ( stack , bparent , fpoint , fpidx , maxBal ) ; break ; case 4 : balance4 ( stack , bparent , fpoint , fpidx , maxBal ) ; break ; case 6 : balance6 ( stack , bparent , fpoint , fpidx , maxBal ) ; break ; case 8 : balance8 ( stack , bparent , fpoint , fpidx , maxBal ) ; break ; case 12 : balance12 ( stack , bparent , fpoint , fpidx , maxBal ) ; break ; case 16 : balance16 ( stack , bparent , fpoint , fpidx , maxBal ) ; break ; case 24 : balance24 ( stack , bparent , fpoint , fpidx , maxBal ) ; break ; case 32 : balance32 ( stack , bparent , fpoint , fpidx , maxBal ) ; break ; default : String x = "Unknown K factor in fringe balance: " + kFactor ; error ( x ) ; break ; }
public class DescendantAxis {
/**
 * {@inheritDoc}
 *
 * Advances a document-order descendant traversal: first child when present,
 * otherwise the nearest pending right sibling (possibly popped from the stack).
 */
@ Override public boolean hasNext ( ) { } }
resetToLastKey ( ) ;
// Fail if there is no node anymore .
if ( mNextKey == NULL_NODE ) { resetToStartKey ( ) ; return false ; }
moveTo ( mNextKey ) ;
// Fail if the subtree is finished .
if ( ( ( ITreeStructData ) getNode ( ) ) . getLeftSiblingKey ( ) == getStartKey ( ) ) { resetToStartKey ( ) ; return false ; }
// Always follow first child if there is one .
if ( ( ( ITreeStructData ) getNode ( ) ) . hasFirstChild ( ) ) { mNextKey = ( ( ITreeStructData ) getNode ( ) ) . getFirstChildKey ( ) ;
// Remember the pending right sibling so the walk can resume there after this subtree.
if ( ( ( ITreeStructData ) getNode ( ) ) . hasRightSibling ( ) ) { mRightSiblingKeyStack . push ( ( ( ITreeStructData ) getNode ( ) ) . getRightSiblingKey ( ) ) ; } return true ; }
// Then follow right sibling if there is one .
if ( ( ( ITreeStructData ) getNode ( ) ) . hasRightSibling ( ) ) { mNextKey = ( ( ITreeStructData ) getNode ( ) ) . getRightSiblingKey ( ) ; return true ; }
// Then follow right sibling on stack .
if ( mRightSiblingKeyStack . size ( ) > 0 ) { mNextKey = mRightSiblingKeyStack . pop ( ) ; return true ; }
// Then end: the current node is still delivered, the next call will fail.
mNextKey = NULL_NODE ; return true ;
public class MultiVertexGeometryImpl { /** * Checked vs . Jan 11 , 2011 */ int QueryCoordinates ( Point3D [ ] dst , int dstSize , int beginIndex , int endIndex ) { } }
int endIndexC = endIndex < 0 ? m_pointCount : endIndex ; endIndexC = Math . min ( endIndexC , beginIndex + dstSize ) ; if ( beginIndex < 0 || beginIndex >= m_pointCount || endIndexC < beginIndex ) // TODO replace geometry exc throw new IllegalArgumentException ( ) ; AttributeStreamOfDbl xy = ( AttributeStreamOfDbl ) getAttributeStreamRef ( VertexDescription . Semantics . POSITION ) ; AttributeStreamOfDbl z = null ; double v = VertexDescription . getDefaultValue ( VertexDescription . Semantics . Z ) ; boolean bHasZ = hasAttribute ( VertexDescription . Semantics . Z ) ; if ( bHasZ ) z = ( AttributeStreamOfDbl ) getAttributeStreamRef ( VertexDescription . Semantics . Z ) ; int j = 0 ; for ( int i = beginIndex ; i < endIndexC ; i ++ , j ++ ) { dst [ j ] . x = xy . read ( 2 * i ) ; dst [ j ] . y = xy . read ( 2 * i + 1 ) ; dst [ j ] . z = bHasZ ? z . read ( i ) : v ; dst [ j ] = getXYZ ( i ) ; } return endIndexC ;
public class MoreWindows { /** * Performs an action . */ @ Override public void actionPerformed ( ActionEvent e ) { } }
String cmd = e . getActionCommand ( ) ; if ( cmd . equals ( "Cancel" ) ) { setVisible ( false ) ; value = null ; } else if ( cmd . equals ( "Select" ) ) { value = list . getSelectedValue ( ) ; setVisible ( false ) ; swingGui . showFileWindow ( value , - 1 ) ; }
public class DatasetSnippets {
/**
 * [TARGET list(TableListOption...)]
 *
 * Documentation snippet: the code between the START/END markers is extracted
 * verbatim into the published docs, so it must stay unchanged.
 */
public Page < Table > list ( ) { } }
// [ START ]
Page < Table > tables = dataset . list ( ) ;
for ( Table table : tables . iterateAll ( ) ) {
// do something with the table
}
// [ END ]
return tables ;
public class ElytronExtension { /** * Gets whether the given { @ code resourceRegistration } is for a server , or if not , * is not for a resource in the { @ code profile } resource tree . */ static boolean isServerOrHostController ( ImmutableManagementResourceRegistration resourceRegistration ) { } }
return resourceRegistration . getProcessType ( ) . isServer ( ) || ! ModelDescriptionConstants . PROFILE . equals ( resourceRegistration . getPathAddress ( ) . getElement ( 0 ) . getKey ( ) ) ;
public class ST_Azimuth { /** * This code compute the angle in radian as postgis does . * @ author : Jose Martinez - Llario from JASPA . JAva SPAtial for SQL * @ param pointA * @ param pointB * @ return */ public static Double azimuth ( Geometry pointA , Geometry pointB ) { } }
if ( pointA == null || pointB == null ) { return null ; } if ( ( pointA instanceof Point ) && ( pointB instanceof Point ) ) { Double angle ; double x0 = ( ( Point ) pointA ) . getX ( ) ; double y0 = ( ( Point ) pointA ) . getY ( ) ; double x1 = ( ( Point ) pointB ) . getX ( ) ; double y1 = ( ( Point ) pointB ) . getY ( ) ; if ( x0 == x1 ) { if ( y0 < y1 ) { angle = 0.0 ; } else if ( y0 > y1 ) { angle = Math . PI ; } else { angle = null ; } } else if ( y0 == y1 ) { if ( x0 < x1 ) { angle = Math . PI / 2 ; } else if ( x0 > x1 ) { angle = Math . PI + ( Math . PI / 2 ) ; } else { angle = null ; } } else if ( x0 < x1 ) { if ( y0 < y1 ) { angle = Math . atan ( Math . abs ( x0 - x1 ) / Math . abs ( y0 - y1 ) ) ; } else { /* ( y0 > y1 ) - equality case handled above */ angle = Math . atan ( Math . abs ( y0 - y1 ) / Math . abs ( x0 - x1 ) ) + ( Math . PI / 2 ) ; } } else { /* ( x0 > x1 ) - equality case handled above */ if ( y0 > y1 ) { angle = Math . atan ( Math . abs ( x0 - x1 ) / Math . abs ( y0 - y1 ) ) + Math . PI ; } else { /* ( y0 < y1 ) - equality case handled above */ angle = Math . atan ( Math . abs ( y0 - y1 ) / Math . abs ( x0 - x1 ) ) + ( Math . PI + ( Math . PI / 2 ) ) ; } } return angle ; } return null ;
public class TypeUtils { /** * Wrap the specified { @ link Type } in a { @ link Typed } wrapper . * @ param < T > inferred generic type * @ param type to wrap * @ return Typed & lt ; T & gt ; * @ since 3.2 */ public static < T > Typed < T > wrap ( final Type type ) { } }
return new Typed < T > ( ) { @ Override public Type getType ( ) { return type ; } } ;
public class InternalSARLParser {
/**
 * ANTLR-generated rule parser - do not hand-edit the logic.
 *
 * InternalSARL.g:16872:1: ruleQualifiedNameInStaticImport returns
 * [AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken()] :
 * (this_ValidID_0 = ruleValidID kw = '.')+ ;
 */
public final AntlrDatatypeRuleToken ruleQualifiedNameInStaticImport ( ) throws RecognitionException { } }
AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken ( ) ;
Token kw = null ;
AntlrDatatypeRuleToken this_ValidID_0 = null ;
enterRule ( ) ;
try {
// InternalSARL . g : 16878:2 : ( ( this _ ValidID _ 0 = ruleValidID kw = ' . ' ) + )
// InternalSARL . g : 16879:2 : ( this _ ValidID _ 0 = ruleValidID kw = ' . ' ) +
{
// InternalSARL . g : 16879:2 : ( this _ ValidID _ 0 = ruleValidID kw = ' . ' ) +
int cnt386 = 0 ;
// One-or-more loop: each iteration consumes "ValidID '.'"; token 77 is the '.' keyword.
loop386 : do { int alt386 = 2 ; switch ( input . LA ( 1 ) ) { case RULE_ID : { int LA386_2 = input . LA ( 2 ) ; if ( ( LA386_2 == 77 ) ) { alt386 = 1 ; } } break ; case 92 : { int LA386_3 = input . LA ( 2 ) ; if ( ( LA386_3 == 77 ) ) { alt386 = 1 ; } } break ; case 44 : { int LA386_4 = input . LA ( 2 ) ; if ( ( LA386_4 == 77 ) ) { alt386 = 1 ; } } break ; case 93 : { int LA386_5 = input . LA ( 2 ) ; if ( ( LA386_5 == 77 ) ) { alt386 = 1 ; } } break ; case 94 : { int LA386_6 = input . LA ( 2 ) ; if ( ( LA386_6 == 77 ) ) { alt386 = 1 ; } } break ; case 95 : { int LA386_7 = input . LA ( 2 ) ; if ( ( LA386_7 == 77 ) ) { alt386 = 1 ; } } break ; }
switch ( alt386 ) { case 1 :
// InternalSARL . g : 16880:3 : this _ ValidID _ 0 = ruleValidID kw = ' . '
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getQualifiedNameInStaticImportAccess ( ) . getValidIDParserRuleCall_0 ( ) ) ; } pushFollow ( FOLLOW_156 ) ; this_ValidID_0 = ruleValidID ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current . merge ( this_ValidID_0 ) ; } if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } kw = ( Token ) match ( input , 77 , FOLLOW_161 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current . merge ( kw ) ; newLeafNode ( kw , grammarAccess . getQualifiedNameInStaticImportAccess ( ) . getFullStopKeyword_1 ( ) ) ; } } break ;
// Loop exit: report an early-exit error unless at least one iteration matched.
default : if ( cnt386 >= 1 ) break loop386 ; if ( state . backtracking > 0 ) { state . failed = true ; return current ; } EarlyExitException eee = new EarlyExitException ( 386 , input ) ; throw eee ; }
cnt386 ++ ; } while ( true ) ;
}
if ( state . backtracking == 0 ) { leaveRule ( ) ; }
}
catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; }
finally { }
return current ;
public class GroupDeviceElement {
/**
 * Dump element: prints this device to stdout, indented by indent_level steps.
 */
@ Override void dump_i ( final int indent_level ) { } }
// Emit one fixed whitespace chunk per nesting level, then the device line.
for ( int i = 0 ; i < indent_level ; i ++ ) { System . out . print ( " " ) ; }
System . out . println ( "`-> Device: " + get_name ( ) ) ;
public class GridBy { /** * Creates an XPath expression - segment that will find a row , selecting the row based on the * text in a specific column . * @ param selectIndex index of the column to find value in ( usually obtained via { @ link # getXPathForColumnIndex ( String ) } ) . * @ param value text to find in the column . * @ return XPath expression selecting a tr in the table . */ public static String getXPathForRowByValueInOtherColumn ( String selectIndex , String value ) { } }
return String . format ( "/tr[td[%1$s]/descendant-or-self::text()[normalized(.)='%2$s']]" , selectIndex , value ) ;
public class Bbox { /** * Computes the intersection of this bounding box with the specified bounding box . * @ param other * Another Bbox . * @ return bounding box of intersection or null if they do not intersect . */ public Bbox intersection ( Bbox other ) { } }
if ( ! this . intersects ( other ) ) { return null ; } else { double minx = other . getX ( ) > this . getX ( ) ? other . getX ( ) : this . getX ( ) ; double maxx = other . getEndPoint ( ) . getX ( ) < this . getEndPoint ( ) . getX ( ) ? other . getEndPoint ( ) . getX ( ) : this . getEndPoint ( ) . getX ( ) ; double miny = other . getY ( ) > this . getY ( ) ? other . getY ( ) : this . getY ( ) ; double maxy = other . getEndPoint ( ) . getY ( ) < this . getEndPoint ( ) . getY ( ) ? other . getEndPoint ( ) . getY ( ) : this . getEndPoint ( ) . getY ( ) ; return new Bbox ( minx , miny , ( maxx - minx ) , ( maxy - miny ) ) ; }
public class PieChart {
/**
 * This is called during layout when the size of this view has changed. If
 * you were just added to the view hierarchy, you're called with the old
 * values of 0.
 *
 * @param w Current width of this view.
 * @param h Current height of this view.
 * @param oldw Old width of this view.
 * @param oldh Old height of this view.
 */
@ Override protected void onSizeChanged ( int w , int h , int oldw , int oldh ) { } }
// Let the framework record the new size first.
super . onSizeChanged ( w , h , oldw , oldh ) ;
// Re-center the rotation pivot on the graph bounds, then rebuild the pie
// geometry for the new size.
// NOTE(review): assumes mGraphBounds has been updated for the new size before
// this runs - confirm against the layout path.
mGraph . setPivot ( mGraphBounds . centerX ( ) , mGraphBounds . centerY ( ) ) ; onDataChanged ( ) ;
public class CmsGwtService { /** * Clears the objects stored in thread local . < p > */ protected void clearThreadStorage ( ) { } }
if ( m_perThreadCmsObject != null ) { m_perThreadCmsObject . remove ( ) ; } if ( perThreadRequest != null ) { perThreadRequest . remove ( ) ; } if ( perThreadResponse != null ) { perThreadResponse . remove ( ) ; }
public class Metadata { /** * Get a value from a multiselect metadata field . * @ param path the key path in the metadata object . Must be prefixed with a " / " . * @ return the list of values set in the field . */ public List < String > getMultiSelect ( String path ) { } }
List < String > values = new ArrayList < String > ( ) ; for ( JsonValue val : this . getValue ( path ) . asArray ( ) ) { values . add ( val . asString ( ) ) ; } return values ;
public class BeanCopier { /** * Map转Bean属性拷贝 * @ param map Map * @ param bean Bean */ private void mapToBean ( Map < ? , ? > map , Object bean ) { } }
valueProviderToBean ( new MapValueProvider ( map , this . copyOptions . ignoreCase ) , bean ) ;
public class XSynchronizedExpressionImpl {
/**
 * <!-- begin-user-doc -->
 * Sets the contained expression, maintaining EMF containment and firing the
 * appropriate notifications. Generated EMF code - keep the notification-chain
 * sequence intact.
 * <!-- end-user-doc -->
 * @generated
 */
public void setExpression ( XExpression newExpression ) { } }
// Standard EMF containment update: detach the old child, attach the new one,
// then dispatch all accumulated notifications at once.
if ( newExpression != expression ) { NotificationChain msgs = null ; if ( expression != null ) msgs = ( ( InternalEObject ) expression ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XSYNCHRONIZED_EXPRESSION__EXPRESSION , null , msgs ) ; if ( newExpression != null ) msgs = ( ( InternalEObject ) newExpression ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XSYNCHRONIZED_EXPRESSION__EXPRESSION , null , msgs ) ; msgs = basicSetExpression ( newExpression , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; }
// Same value re-set: still notify observers when notification is required.
else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XbasePackage . XSYNCHRONIZED_EXPRESSION__EXPRESSION , newExpression , newExpression ) ) ;
public class InjectionBinding {
/**
 * Captures per-component state needed while injection metadata is processed. (d730349.1)
 *
 * @param nameSpaceConfig the component namespace configuration being processed
 */
void metadataProcessingInitialize ( ComponentNameSpaceConfiguration nameSpaceConfig ) { } }
ivContext = ( InjectionProcessorContext ) nameSpaceConfig . getInjectionProcessorContext ( ) ; ivNameSpaceConfig = nameSpaceConfig ;
// Following must be available after ivContext and ivNameSpaceConfig are cleared
ivCheckAppConfig = nameSpaceConfig . isCheckApplicationConfiguration ( ) ;
public class HttpUtil { /** * 下载远程文本 * @ param url 请求的url * @ param customCharset 自定义的字符集 , 可以使用 { @ link CharsetUtil # charset } 方法转换 * @ param streamPress 进度条 { @ link StreamProgress } * @ return 文本 */ public static String downloadString ( String url , Charset customCharset , StreamProgress streamPress ) { } }
if ( StrUtil . isBlank ( url ) ) { throw new NullPointerException ( "[url] is null!" ) ; } FastByteArrayOutputStream out = new FastByteArrayOutputStream ( ) ; download ( url , out , true , streamPress ) ; return null == customCharset ? out . toString ( ) : out . toString ( customCharset ) ;
public class ArrayUtil { /** * Fills the array with a value . */ public static void fillArray ( Object [ ] array , Object value ) { } }
int to = array . length ; while ( -- to >= 0 ) { array [ to ] = value ; }
public class EntityCapsManager {
/**
 * Returns true if Entity Caps are supported by a given JID.
 *
 * @param jid the JID to query
 * @return true if the entity supports Entity Capabilities.
 * @throws XMPPErrorException
 * @throws NoResponseException
 * @throws NotConnectedException
 * @throws InterruptedException
 */
public boolean areEntityCapsSupported ( Jid jid ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
// Delegate the feature probe to ServiceDiscoveryManager for the caps namespace.
return sdm . supportsFeature ( jid , NAMESPACE ) ;
public class ProjectStatusesBase { /** * Deletes a specific , existing project status update . * Returns an empty data record . * @ param projectStatus The project status update to delete . * @ return Request object */ public ItemRequest < ProjectStatus > delete ( String projectStatus ) { } }
String path = String . format ( "/project_statuses/%s" , projectStatus ) ; return new ItemRequest < ProjectStatus > ( this , ProjectStatus . class , path , "DELETE" ) ;