signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class KamImpl { /** * { @ inheritDoc } */ @ Override public Set < KamNode > getAdjacentNodes ( KamNode kamNode ) { } }
return getAdjacentNodes ( kamNode , EdgeDirectionType . BOTH , null , null ) ;
public class Agg { /** * Get a { @ link Collector } that calculates the < code > FIRST < / code > function . * Note that unlike in ( Oracle ) SQL , where the < code > FIRST < / code > function * is an ordered set aggregate function that produces a set of results , this * collector just produces the first value in the order of stream traversal . * For matching behaviour to Oracle ' s [ aggregate function ] KEEP * ( DENSE _ RANK FIRST ORDER BY . . . ) , use { @ link # maxAll ( Comparator ) } instead . */ public static < T > Collector < T , ? , Optional < T > > first ( ) { } }
return Collectors . reducing ( ( v1 , v2 ) -> v1 ) ;
public class JavaUtils { /** * Liberty change start */ @ FFDCIgnore ( NumberFormatException . class ) private static boolean determineIsJava8Before161 ( String version ) { } }
try { return version != null && version . startsWith ( "1.8.0_" ) && Integer . parseInt ( version . substring ( 6 ) ) < 161 ; } catch ( NumberFormatException ex ) { // no - op } return false ;
public class Normalizer { /** * Check whether the given character sequence < code > src < / code > is normalized * according to the normalization method < code > form < / code > . * @ param src character sequence to check * @ param form normalization form to check against * @ return true if normalized according to < code > form < / code > */ public static boolean isNormalized ( CharSequence src , Form form ) { } }
return normalize ( src , form ) . equals ( src ) ;
public class MetaMediaManager { /** * Our media front end should implement { @ link FrameParticipant } and call this method in their * { @ link FrameParticipant # tick } method . They must also first check { @ link # isPaused } and not * call this method if we are paused . As they will probably want to have willTick ( ) and * didTick ( ) calldown methods , we cannot handle pausedness for them . */ public void tick ( long tickStamp ) { } }
// now tick our animations and sprites _animmgr . tick ( tickStamp ) ; _spritemgr . tick ( tickStamp ) ; // if performance debugging is enabled , if ( FrameManager . _perfDebug . getValue ( ) ) { if ( _perfLabel == null ) { _perfLabel = new Label ( "" , Label . OUTLINE , Color . white , Color . black , new Font ( "Arial" , Font . PLAIN , 10 ) ) ; } if ( _perfRect == null ) { _perfRect = new Rectangle ( 5 , 5 , 0 , 0 ) ; } StringBuilder perf = new StringBuilder ( ) ; perf . append ( "[FPS: " ) ; perf . append ( _framemgr . getPerfTicks ( ) ) . append ( "/" ) ; perf . append ( _framemgr . getPerfTries ( ) ) ; perf . append ( " PM:" ) ; StringUtil . toString ( perf , _framemgr . getPerfMetrics ( ) ) ; // perf . append ( " MP : " ) . append ( _ dirtyPerTick ) ; perf . append ( "]" ) ; String perfStatus = perf . toString ( ) ; if ( ! _perfStatus . equals ( perfStatus ) ) { _perfStatus = perfStatus ; _perfLabel . setText ( perfStatus ) ; Graphics2D gfx = _host . createGraphics ( ) ; if ( gfx != null ) { _perfLabel . layout ( gfx ) ; gfx . dispose ( ) ; // make sure the region we dirty contains the old and the new text ( which we // ensure by never letting the rect shrink ) Dimension psize = _perfLabel . getSize ( ) ; _perfRect . width = Math . max ( _perfRect . width , psize . width ) ; _perfRect . height = Math . max ( _perfRect . height , psize . height ) ; _remgr . addDirtyRegion ( new Rectangle ( _perfRect ) ) ; } } } else { _perfRect = null ; }
public class HttpConnection { /** * Attempts to shutdown the { @ link Socket } ' s output , via Socket . shutdownOutput ( ) * when running on JVM 1.3 or higher . * @ deprecated unused */ @ Deprecated public void shutdownOutput ( ) { } }
LOG . trace ( "enter HttpConnection.shutdownOutput()" ) ; try { socket . shutdownOutput ( ) ; } catch ( Exception ex ) { LOG . debug ( "Unexpected Exception caught" , ex ) ; // Ignore , and hope everything goes right } // close output stream ?
public class SettingsImpl { /** * Get the inputrc file , if not set it defaults to : * System . getProperty ( " user . home " ) + Config . getPathSeparator ( ) + " . inputrc " * @ return inputrc */ @ Override public File inputrc ( ) { } }
if ( inputrc == null ) { inputrc = new File ( System . getProperty ( "user.home" ) + Config . getPathSeparator ( ) + ".inputrc" ) ; } return inputrc ;
public class CmsUserIconHelper { /** * Returns the file suffix . < p > * @ param fileName the file name * @ return the suffix */ private String getSuffix ( String fileName ) { } }
int index = fileName . lastIndexOf ( "." ) ; if ( index > 0 ) { return fileName . substring ( index ) ; } else { return fileName ; }
public class Symbol { /** * An accessor method for the attributes of this symbol . * Attributes of class symbols should be accessed through the accessor * method to make sure that the class symbol is loaded . */ public List < Attribute . Compound > getRawAttributes ( ) { } }
return ( metadata == null ) ? List . < Attribute . Compound > nil ( ) : metadata . getDeclarationAttributes ( ) ;
public class DesktopPane { /** * ( non - Javadoc ) * @ see javax . swing . Icon # paintIcon ( java . awt . Component , java . awt . Graphics , int , int ) */ @ Override public void paintIcon ( Component c , Graphics g , int x , int y ) { } }
Graphics2D g2d = ( Graphics2D ) g . create ( ) ; g2d . translate ( x , y ) ; double coef1 = ( double ) this . width / ( double ) getOrigWidth ( ) ; double coef2 = ( double ) this . height / ( double ) getOrigHeight ( ) ; g2d . scale ( coef1 , coef2 ) ; paint ( g2d ) ; g2d . dispose ( ) ;
public class DijkstraSSSP { /** * Start the search . This method may only be invoked once . */ public void findSSSP ( ) { } }
Record < N , E > initRec = new Record < > ( init , 0.0f ) ; if ( records . put ( init , initRec ) != null ) { throw new IllegalStateException ( "Search has already been performed!" ) ; } SmartDynamicPriorityQueue < Record < N , E > > pq = BinaryHeap . create ( graph . size ( ) ) ; initRec . ref = pq . referencedAdd ( initRec ) ; while ( ! pq . isEmpty ( ) ) { // Remove node with minimum distance Record < N , E > rec = pq . extractMin ( ) ; float dist = rec . dist ; N node = rec . node ; // edge scanning for ( E edge : graph . getOutgoingEdges ( node ) ) { float w = edgeWeights . getEdgeWeight ( edge ) ; float newDist = dist + w ; N tgt = graph . getTarget ( edge ) ; Record < N , E > tgtRec = records . get ( tgt ) ; if ( tgtRec == null ) { // node has not been visited before , add a record // and add it to the queue tgtRec = new Record < > ( tgt , newDist , edge , rec ) ; tgtRec . ref = pq . referencedAdd ( tgtRec ) ; records . put ( tgt , tgtRec ) ; } else if ( newDist < tgtRec . dist ) { // using currently considered edge decreases current distance tgtRec . dist = newDist ; tgtRec . reach = edge ; tgtRec . depth = rec . depth + 1 ; tgtRec . parent = rec ; // update it ' s position in the queue pq . keyChanged ( tgtRec . ref ) ; } } }
public class ConvexHull { /** * Returns true if the given path of the input MultiPath is convex . Returns false otherwise . * \ param multi _ path The MultiPath to check if the path is convex . * \ param path _ index The path of the MultiPath to check if its convex . */ static boolean isPathConvex ( MultiPath multi_path , int path_index , ProgressTracker progress_tracker ) { } }
MultiPathImpl mimpl = ( MultiPathImpl ) multi_path . _getImpl ( ) ; int path_start = mimpl . getPathStart ( path_index ) ; int path_end = mimpl . getPathEnd ( path_index ) ; boolean bxyclosed = ! mimpl . isClosedPath ( path_index ) && mimpl . isClosedPathInXYPlane ( path_index ) ; AttributeStreamOfDbl position = ( AttributeStreamOfDbl ) ( mimpl . getAttributeStreamRef ( VertexDescription . Semantics . POSITION ) ) ; int position_start = 2 * path_start ; int position_end = 2 * path_end ; if ( bxyclosed ) position_end -= 2 ; if ( position_end - position_start < 6 ) return true ; // This matches the logic for case 1 of the tree hull algorithm . The idea is inductive . We assume we have a convex hull pt _ 0 , . . . , pt _ m , and we see if // a new point ( pt _ pivot ) is among the transitive tournament for pt _ 0 , knowing that pt _ pivot comes after pt _ m . // We check three conditions : // 1 ) pt _ m - > pt _ pivot - > pt _ 0 is clockwise ( closure across the boundary is convex ) // 2 ) pt _ 1 - > pt _ pivot - > pt _ 0 is clockwise ( the first step forward is convex ) ( pt _ 1 is the next point after pt _ 0) // 3 ) pt _ m - > pt _ pivot - > pt _ m _ prev is clockwise ( the first step backwards is convex ) ( pt _ m _ prev is the previous point before pt _ m ) // If all three of the above conditions are clockwise , then pt _ pivot is among the transitive tournament for pt _ 0 , and therefore the polygon pt _ 0 , . . . , pt _ m , pt _ pivot is convex . Point2D pt_0 = new Point2D ( ) , pt_m = new Point2D ( ) , pt_pivot = new Point2D ( ) ; position . read ( position_start , pt_0 ) ; position . read ( position_start + 2 , pt_m ) ; position . read ( position_start + 4 , pt_pivot ) ; // Initial inductive step ECoordinate det_ec = determinant_ ( pt_m , pt_pivot , pt_0 ) ; if ( det_ec . isFuzzyZero ( ) || ! isClockwise_ ( det_ec . value ( ) ) ) return false ; Point2D pt_1 = new Point2D ( pt_m . x , pt_m . y ) ; Point2D pt_m_prev = new Point2D ( ) ; // Assume that pt _ 0 , . 
. . , pt _ m is convex . Check if the next point , pt _ pivot , maintains the convex invariant . for ( int i = position_start + 6 ; i < position_end ; i += 2 ) { pt_m_prev . setCoords ( pt_m ) ; pt_m . setCoords ( pt_pivot ) ; position . read ( i , pt_pivot ) ; det_ec = determinant_ ( pt_m , pt_pivot , pt_0 ) ; if ( det_ec . isFuzzyZero ( ) || ! isClockwise_ ( det_ec . value ( ) ) ) return false ; det_ec = determinant_ ( pt_1 , pt_pivot , pt_0 ) ; if ( det_ec . isFuzzyZero ( ) || ! isClockwise_ ( det_ec . value ( ) ) ) return false ; det_ec = determinant_ ( pt_m , pt_pivot , pt_m_prev ) ; if ( det_ec . isFuzzyZero ( ) || ! isClockwise_ ( det_ec . value ( ) ) ) return false ; } return true ;
public class Tuple15 { /** * Skip 7 degrees from this tuple . */ public final Tuple8 < T8 , T9 , T10 , T11 , T12 , T13 , T14 , T15 > skip7 ( ) { } }
return new Tuple8 < > ( v8 , v9 , v10 , v11 , v12 , v13 , v14 , v15 ) ;
public class TranscoderDB { /** * / * get _ transcoder _ entry */ public static Entry getEntry ( byte [ ] source , byte [ ] destination ) { } }
CaseInsensitiveBytesHash < Entry > sHash = transcoders . get ( source ) ; return sHash == null ? null : sHash . get ( destination ) ;
public class LifecycleInjector { /** * Create an injector that is a child of the bootstrap bindings only * @ param modules binding modules * @ return injector */ public Injector createChildInjector ( Collection < Module > modules ) { } }
Injector childInjector ; Collection < Module > localModules = modules ; for ( ModuleTransformer transformer : transformers ) { localModules = transformer . call ( localModules ) ; } // noinspection deprecation if ( mode == LifecycleInjectorMode . REAL_CHILD_INJECTORS ) { childInjector = injector . createChildInjector ( localModules ) ; } else { childInjector = createSimulatedChildInjector ( localModules ) ; } for ( PostInjectorAction action : actions ) { action . call ( childInjector ) ; } return childInjector ;
public class ListFunctionsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListFunctionsRequest listFunctionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listFunctionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listFunctionsRequest . getMasterRegion ( ) , MASTERREGION_BINDING ) ; protocolMarshaller . marshall ( listFunctionsRequest . getFunctionVersion ( ) , FUNCTIONVERSION_BINDING ) ; protocolMarshaller . marshall ( listFunctionsRequest . getMarker ( ) , MARKER_BINDING ) ; protocolMarshaller . marshall ( listFunctionsRequest . getMaxItems ( ) , MAXITEMS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class BeanPropertyAccessor { /** * Returns a new or cached BeanPropertyAccessor for the given class . */ public static < B > BeanPropertyAccessor < B > forClass ( Class < B > clazz ) { } }
return forClass ( clazz , PropertySet . ALL ) ;
public class DirectoryServiceClient { /** * Get the Service . * @ param serviceName * the serviceName . * @ param watcher * the Watcher . * @ return * the ModelService . */ public ModelService getService ( String serviceName , Watcher watcher ) { } }
WatcherRegistration wcb = null ; if ( watcher != null ) { wcb = new WatcherRegistration ( serviceName , watcher ) ; } ProtocolHeader header = new ProtocolHeader ( ) ; header . setType ( ProtocolType . GetService ) ; GetServiceProtocol p = new GetServiceProtocol ( serviceName ) ; p . setWatcher ( watcher != null ) ; GetServiceResponse resp ; resp = ( GetServiceResponse ) connection . submitRequest ( header , p , wcb ) ; return resp . getService ( ) ;
public class EasterRule { /** * Return true if the given Date is on the same day as Easter */ @ Override public boolean isOn ( Date date ) { } }
synchronized ( calendar ) { calendar . setTime ( date ) ; int dayOfYear = calendar . get ( Calendar . DAY_OF_YEAR ) ; calendar . setTime ( computeInYear ( calendar . getTime ( ) , calendar ) ) ; return calendar . get ( Calendar . DAY_OF_YEAR ) == dayOfYear ; }
public class RootDocImpl { /** * Do lazy initialization of " documentation " string . */ @ Override protected String documentation ( ) { } }
if ( documentation == null ) { JavaFileObject overviewPath = getOverviewPath ( ) ; if ( overviewPath == null ) { // no doc file to be had documentation = "" ; } else { // read from file try { documentation = readHTMLDocumentation ( overviewPath . openInputStream ( ) , overviewPath ) ; } catch ( IOException exc ) { documentation = "" ; env . error ( null , "javadoc.File_Read_Error" , overviewPath . getName ( ) ) ; } } } return documentation ;
public class PropertiesWriter { /** * Converts unicodes to encoded & # 92 ; uxxxx and escapes special characters * with a preceding slash */ private String saveConvert ( String theString , boolean escapeSpace ) { } }
int len = theString . length ( ) ; int bufLen = len * 2 ; if ( bufLen < 0 ) { bufLen = Integer . MAX_VALUE ; } StringBuffer outBuffer = new StringBuffer ( bufLen ) ; for ( int x = 0 ; x < len ; x ++ ) { char aChar = theString . charAt ( x ) ; // Handle common case first , selecting largest block that // avoids the specials below if ( ( aChar > 61 ) && ( aChar < 127 ) ) { if ( aChar == '\\' ) { outBuffer . append ( '\\' ) ; outBuffer . append ( '\\' ) ; continue ; } outBuffer . append ( aChar ) ; continue ; } switch ( aChar ) { case ' ' : if ( x == 0 || escapeSpace ) outBuffer . append ( '\\' ) ; outBuffer . append ( ' ' ) ; break ; case '\t' : outBuffer . append ( '\\' ) ; outBuffer . append ( 't' ) ; break ; case '\n' : outBuffer . append ( '\\' ) ; outBuffer . append ( 'n' ) ; break ; case '\r' : outBuffer . append ( '\\' ) ; outBuffer . append ( 'r' ) ; break ; case '\f' : outBuffer . append ( '\\' ) ; outBuffer . append ( 'f' ) ; break ; case '=' : // Fall through case ':' : // Fall through case '#' : // Fall through case '!' : outBuffer . append ( '\\' ) ; outBuffer . append ( aChar ) ; break ; default : outBuffer . append ( aChar ) ; } } return outBuffer . toString ( ) ;
public class ScrollAnimator { /** * Perform the " translateY " animation using the new scroll position and the old scroll position to show or hide * the animated view . * @ param oldScrollPosition * @ param newScrollPosition */ private void onScrollPositionChanged ( int oldScrollPosition , int newScrollPosition ) { } }
int newScrollDirection ; if ( newScrollPosition < oldScrollPosition ) { newScrollDirection = SCROLL_TO_TOP ; } else { newScrollDirection = SCROLL_TO_BOTTOM ; } if ( directionHasChanged ( newScrollDirection ) ) { translateYAnimatedView ( newScrollDirection ) ; }
public class CreateTeaseTrainingSet { /** * read the data set */ private void run ( ) throws IOException { } }
PrintWriter pw = new PrintWriter ( new FileWriter ( outputFile ) ) ; LineNumberReader lr = new LineNumberReader ( new FileReader ( inputFile ) ) ; String line = null ; int j = 0 ; // String t = null ; while ( ( line = lr . readLine ( ) ) != null ) { logger . info ( j + " " + line ) ; pw . print ( "1" ) ; pw . print ( "\t" ) ; pw . print ( ++ j ) ; pw . print ( ".tease\t" ) ; String [ ] s = line . split ( " " ) ; for ( int i = 0 ; i < s . length - 1 ; i ++ ) { String [ ] t = s [ i ] . split ( "/" ) ; if ( i != 0 ) pw . print ( " " ) ; pw . print ( i ) ; pw . print ( "&&" ) ; pw . print ( t [ 0 ] ) ; // form pw . print ( "&&" ) ; pw . print ( t [ 1 ] ) ; // lemma pw . print ( "&&" ) ; pw . print ( t [ 2 ] ) ; // pos pw . print ( "&&" ) ; pw . print ( t [ 3 ] ) ; // NE | O pw . print ( "&&" ) ; pw . print ( t [ 4 ] ) ; // target | O } pw . print ( "\n" ) ; } // end while lr . close ( ) ; pw . flush ( ) ; pw . close ( ) ;
public class PhaseThreeApplication { /** * Logic to recover from a failed protein family document . * @ param pn * @ param bldr * @ param pfLocation * @ param e * @ return */ private ProtoNetwork failProteinFamilies ( final ProtoNetwork pn , final StringBuilder bldr , String pfLocation , String errorMessage ) { } }
bldr . append ( "PROTEIN FAMILY RESOLUTION FAILURE in " ) ; bldr . append ( pfLocation ) ; bldr . append ( "\n\treason: " ) ; bldr . append ( errorMessage ) ; stageWarning ( bldr . toString ( ) ) ; // could not resolve protein family resource so return original // proto network . return pn ;
public class BindDataSourceSubProcessor { /** * Process second round . * @ param annotations * the annotations * @ param roundEnv * the round env * @ return true , if successful */ public boolean processSecondRound ( Set < ? extends TypeElement > annotations , RoundEnvironment roundEnv ) { } }
for ( SQLiteDatabaseSchema schema : schemas ) { // Analyze beans BEFORE daos , because beans are needed for DAO // definition for ( String daoName : schema . getDaoNameSet ( ) ) { // check dao into bean definition if ( globalDaoGenerated . contains ( daoName ) ) { createSQLEntityFromDao ( schema , schema . getElement ( ) , daoName ) ; createSQLDaoDefinition ( schema , globalBeanElements , globalDaoElements , daoName ) ; } } // end foreach bean } // end foreach dataSource return true ;
public class SliderBar { /** * Draw the tick along the line . */ private void drawTicks ( ) { } }
// Abort if not attached if ( ! isAttached ( ) ) { return ; } // Draw the ticks int lineWidth = lineElement . getOffsetWidth ( ) ; if ( numTicks > 0 ) { // Create the ticks or make them visible for ( int i = 0 ; i <= numTicks ; i ++ ) { Element tick = null ; if ( i < tickElements . size ( ) ) { tick = tickElements . get ( i ) ; } else { // Create the new tick tick = DOM . createDiv ( ) ; DOM . setStyleAttribute ( tick , "position" , "absolute" ) ; DOM . setStyleAttribute ( tick , "display" , "none" ) ; DOM . appendChild ( getElement ( ) , tick ) ; tickElements . add ( tick ) ; } if ( enabled ) { DOM . setElementProperty ( tick , "className" , SLIDER_TICK ) ; } else { DOM . setElementProperty ( tick , "className" , SLIDER_TICK + " " + SLIDER_TICK_SLIDING ) ; } // Position the tick and make it visible DOM . setStyleAttribute ( tick , "visibility" , "hidden" ) ; DOM . setStyleAttribute ( tick , "display" , "" ) ; int tickWidth = tick . getOffsetWidth ( ) ; int tickLeftOffset = lineLeftOffset + ( lineWidth * i / numTicks ) - ( tickWidth / 2 ) ; tickLeftOffset = Math . min ( tickLeftOffset , lineLeftOffset + lineWidth - tickWidth ) ; DOM . setStyleAttribute ( tick , "left" , tickLeftOffset + "px" ) ; DOM . setStyleAttribute ( tick , "visibility" , "visible" ) ; } // Hide unused ticks for ( int i = ( numTicks + 1 ) ; i < tickElements . size ( ) ; i ++ ) { DOM . setStyleAttribute ( tickElements . get ( i ) , "display" , "none" ) ; } } else { // Hide all ticks for ( Element elem : tickElements ) { DOM . setStyleAttribute ( elem , "display" , "none" ) ; } }
public class Splash { /** * Paints the image on the window . */ public void paint ( Graphics g ) { } }
g . drawImage ( image , 0 , 0 , this ) ; // Notify method splash that the window has been painted . if ( ! paintCalled ) { paintCalled = true ; synchronized ( this ) { notifyAll ( ) ; } }
public class CmsXmlPage { /** * Converts the XML structure of the pre 5.5.0 development version of * the XML page to the final 6.0 version . < p > */ private void convertOldDocument ( ) { } }
Document newDocument = DocumentHelper . createDocument ( ) ; Element root = newDocument . addElement ( NODE_PAGES ) ; root . add ( I_CmsXmlSchemaType . XSI_NAMESPACE ) ; root . addAttribute ( I_CmsXmlSchemaType . XSI_NAMESPACE_ATTRIBUTE_NO_SCHEMA_LOCATION , XMLPAGE_XSD_SYSTEM_ID ) ; Map < String , Element > pages = new HashMap < String , Element > ( ) ; if ( ( m_document . getRootElement ( ) != null ) && ( m_document . getRootElement ( ) . element ( NODE_ELEMENTS ) != null ) ) { for ( Iterator < Element > i = CmsXmlGenericWrapper . elementIterator ( m_document . getRootElement ( ) . element ( NODE_ELEMENTS ) , NODE_ELEMENT ) ; i . hasNext ( ) ; ) { Element elem = i . next ( ) ; try { String elementName = elem . attributeValue ( ATTRIBUTE_NAME ) ; String elementLang = elem . attributeValue ( ATTRIBUTE_LANGUAGE ) ; String elementEnabled = elem . attributeValue ( ATTRIBUTE_ENABLED ) ; boolean enabled = ( elementEnabled == null ) ? true : Boolean . valueOf ( elementEnabled ) . booleanValue ( ) ; Element page = pages . get ( elementLang ) ; if ( page == null ) { // no page available for the language , add one page = root . addElement ( NODE_PAGE ) . addAttribute ( ATTRIBUTE_LANGUAGE , elementLang ) ; pages . put ( elementLang , page ) ; } Element newElement = page . addElement ( NODE_ELEMENT ) . addAttribute ( ATTRIBUTE_NAME , elementName ) ; if ( ! enabled ) { newElement . addAttribute ( ATTRIBUTE_ENABLED , String . valueOf ( enabled ) ) ; } Element links = elem . element ( NODE_LINKS ) ; if ( links != null ) { newElement . add ( links . createCopy ( ) ) ; } Element content = elem . element ( NODE_CONTENT ) ; if ( content != null ) { newElement . add ( content . createCopy ( ) ) ; } } catch ( NullPointerException e ) { LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . ERR_XML_PAGE_CONVERT_CONTENT_0 ) , e ) ; } } } // now replace the old with the new document m_document = newDocument ;
public class HadoopResourceFinder { /** * Finds the file with the specified name and returns a reader for that * files contents . * @ param fileName the name of a file * @ return a { @ code BufferedReader } to the contents of the specified file * @ throws IOException if the resource cannot be found or if an error occurs * while opening the resource */ public BufferedReader open ( String fileName ) throws IOException { } }
Path filePath = new Path ( fileName ) ; if ( ! hadoopFs . exists ( filePath ) ) { throw new IOException ( fileName + " does not exist in HDFS" ) ; } BufferedReader br = new BufferedReader ( new InputStreamReader ( hadoopFs . open ( filePath ) ) ) ; return br ;
public class TinyPlugz { /** * Default implementation for { @ link # getResource ( String ) } building upon * result of { @ link # getClassLoader ( ) } . * @ param name Name of the resource . * @ return An url to the resource . */ protected final Optional < URL > defaultGetResource ( String name ) { } }
return Optional . ofNullable ( getClassLoader ( ) . getResource ( name ) ) ;
public class Result { /** * For UPDATE _ RESULT results * The parameters are set by this method as the Result is reused */ public void setPreparedResultUpdateProperties ( Object [ ] parameterValues ) { } }
if ( navigator . getSize ( ) == 1 ) { ( ( RowSetNavigatorClient ) navigator ) . setData ( 0 , parameterValues ) ; } else { navigator . clear ( ) ; navigator . add ( parameterValues ) ; }
public class DefaultGroovyMethods { /** * Multiply a BigDecimal and a Double . * Note : This method was added to enforce the Groovy rule of * BigDecimal * Double = = Double . Without this method , the * multiply ( BigDecimal ) method in BigDecimal would respond * and return a BigDecimal instead . Since BigDecimal is preferred * over Number , the Number * Number method is not chosen as in older * versions of Groovy . * @ param left a BigDecimal * @ param right a Double * @ return the multiplication of left by right * @ since 1.0 */ public static Number multiply ( BigDecimal left , Double right ) { } }
return NumberMath . multiply ( left , right ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link IdentifierType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link IdentifierType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "parameterID" ) public JAXBElement < IdentifierType > createParameterID ( IdentifierType value ) { } }
return new JAXBElement < IdentifierType > ( _ParameterID_QNAME , IdentifierType . class , null , value ) ;
public class SparqlLogicConceptMatcher { /** * Obtain all the matching resources with the URIs of { @ code origin } within the range of MatchTypes provided , both inclusive . * @ param origins URIs to match * @ param minType the minimum MatchType we want to obtain * @ param maxType the maximum MatchType we want to obtain * @ return a { @ link com . google . common . collect . Table } with the result of the matching indexed by origin URI and then destination URI . */ @ Override public Table < URI , URI , MatchResult > listMatchesWithinRange ( Set < URI > origins , MatchType minType , MatchType maxType ) { } }
return obtainMatchResults ( origins , minType , maxType ) ;
public class WebhookResourcesImpl { /** * Resets the shared secret for the specified Webhook . For more information about how a shared secret is used , * see Authenticating Callbacks . * It mirrors to the following Smartsheet REST API method : POST / webhooks / { webhookId } / resetsharedsecret * @ param webhookId the webhook Id * @ return the Webhook shared secret * @ throws IllegalArgumentException if any argument is null or empty string * @ throws InvalidRequestException if there is any problem with the REST API request * @ throws AuthorizationException if there is any problem with the REST API authorization ( access token ) * @ throws ResourceNotFoundException if the resource cannot be found * @ throws ServiceUnavailableException if the REST API service is not available ( possibly due to rate limiting ) * @ throws SmartsheetException if there is any other error during the operation */ public WebhookSharedSecret resetSharedSecret ( long webhookId ) throws SmartsheetException { } }
HttpRequest request = createHttpRequest ( this . getSmartsheet ( ) . getBaseURI ( ) . resolve ( "webhooks/" + webhookId + "/resetsharedsecret" ) , HttpMethod . POST ) ; HttpResponse response = getSmartsheet ( ) . getHttpClient ( ) . request ( request ) ; WebhookSharedSecret secret = null ; switch ( response . getStatusCode ( ) ) { case 200 : try { secret = this . smartsheet . getJsonSerializer ( ) . deserialize ( WebhookSharedSecret . class , response . getEntity ( ) . getContent ( ) ) ; } catch ( JsonParseException e ) { throw new SmartsheetException ( e ) ; } catch ( JsonMappingException e ) { throw new SmartsheetException ( e ) ; } catch ( IOException e ) { throw new SmartsheetException ( e ) ; } break ; default : handleError ( response ) ; } getSmartsheet ( ) . getHttpClient ( ) . releaseConnection ( ) ; return secret ;
public class CmsUser { /** * Sets the email address of this user . < p > * @ param email the email address to set */ public void setEmail ( String email ) { } }
checkEmail ( email ) ; if ( email != null ) { email = email . trim ( ) ; } m_email = email ;
public class SamlMetadataUIParserAction { /** * Gets entity id from request . * @ param requestContext the request context * @ return the entity id from request */ protected String getEntityIdFromRequest ( final RequestContext requestContext ) { } }
val request = WebUtils . getHttpServletRequestFromExternalWebflowContext ( requestContext ) ; return request . getParameter ( this . entityIdParameterName ) ;
public class Part { /** * Get the table name . */ public String getTableNames ( boolean bAddQuotes ) { } }
return ( m_tableName == null ) ? Record . formatTableNames ( PART_FILE , bAddQuotes ) : super . getTableNames ( bAddQuotes ) ;
public class NumberSelectorServiceImpl { /** * Here we expect a perfect match in DB . * Several rules regarding organization scoping will be applied in the DAO * filter to ensure only applicable numbers in DB are retrieved . * @ param number the number to match against IncomingPhoneNumbersDao * @ param sourceOrganizationSid * @ param destinationOrganizationSid * @ return the matched number , null if not matched . */ private NumberSelectionResult findSingleNumber ( String number , Sid sourceOrganizationSid , Sid destinationOrganizationSid , Set < SearchModifier > modifiers ) { } }
NumberSelectionResult matchedNumber = new NumberSelectionResult ( null , false , null ) ; IncomingPhoneNumberFilter . Builder filterBuilder = IncomingPhoneNumberFilter . Builder . builder ( ) ; filterBuilder . byPhoneNumber ( number ) ; int unfilteredCount = numbersDao . getTotalIncomingPhoneNumbers ( filterBuilder . build ( ) ) ; if ( unfilteredCount > 0 ) { if ( destinationOrganizationSid != null ) { filterBuilder . byOrgSid ( destinationOrganizationSid . toString ( ) ) ; } else if ( ( modifiers != null ) && ( modifiers . contains ( SearchModifier . ORG_COMPLIANT ) ) ) { // restrict search to non SIP numbers logger . debug ( "Organizations are null, restrict PureSIP numbers." ) ; filterBuilder . byPureSIP ( Boolean . FALSE ) ; } // this rule forbids using PureSIP numbers if organizations doesnt match // this means only external provider numbers will be evaluated in DB if ( sourceOrganizationSid != null && ! sourceOrganizationSid . equals ( destinationOrganizationSid ) ) { filterBuilder . byPureSIP ( Boolean . FALSE ) ; } IncomingPhoneNumberFilter numFilter = filterBuilder . build ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Searching with filter:" + numFilter ) ; } List < IncomingPhoneNumber > matchedNumbers = numbersDao . getIncomingPhoneNumbersByFilter ( numFilter ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Num of results:" + matchedNumbers . size ( ) + ".unfilteredCount:" + unfilteredCount ) ; } // we expect a perfect match , so first result taken if ( matchedNumbers != null && matchedNumbers . size ( ) > 0 ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Matched number with filter:" + matchedNumbers . get ( 0 ) . toString ( ) ) ; } matchedNumber = new NumberSelectionResult ( matchedNumbers . get ( 0 ) , Boolean . FALSE , ResultType . REGULAR ) ; } else { // without organization fileter we had results , so this is // marked as filtered by organization matchedNumber . setOrganizationFiltered ( Boolean . 
TRUE ) ; } } return matchedNumber ;
public class WebFragmentTypeImpl { /** * Returns all < code > security - role < / code > elements * @ return list of < code > security - role < / code > */ public List < SecurityRoleType < WebFragmentType < T > > > getAllSecurityRole ( ) { } }
List < SecurityRoleType < WebFragmentType < T > > > list = new ArrayList < SecurityRoleType < WebFragmentType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "security-role" ) ; for ( Node node : nodeList ) { SecurityRoleType < WebFragmentType < T > > type = new SecurityRoleTypeImpl < WebFragmentType < T > > ( this , "security-role" , childNode , node ) ; list . add ( type ) ; } return list ;
public class ValidDBInstanceModificationsMessage { /** * Valid storage options for your DB instance . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setStorage ( java . util . Collection ) } or { @ link # withStorage ( java . util . Collection ) } if you want to override * the existing values . * @ param storage * Valid storage options for your DB instance . * @ return Returns a reference to this object so that method calls can be chained together . */ public ValidDBInstanceModificationsMessage withStorage ( ValidStorageOptions ... storage ) { } }
if ( this . storage == null ) { setStorage ( new java . util . ArrayList < ValidStorageOptions > ( storage . length ) ) ; } for ( ValidStorageOptions ele : storage ) { this . storage . add ( ele ) ; } return this ;
public class ViSearch { /** * send search actions after finishing search * @ param action * @ param reqId */ private void sendSolutionActions ( String action , String reqId ) { } }
if ( reqId != null && ! reqId . equals ( "" ) ) { Map < String , String > map = Maps . newHashMap ( ) ; map . put ( "action" , action ) ; map . put ( "reqid" , reqId ) ; this . sendEvent ( map ) ; }
public class ExpressionAggregate {
    /**
     * Get the result of a SetFunction or an ordinary value.
     *
     * @param session   context
     * @param currValue instance of set function or value; null when no rows
     *                  were accumulated for this aggregate
     * @return the aggregate's final value; INTEGER 0 for an empty COUNT (or
     *         APPROX_COUNT_DISTINCT), SQL NULL for other empty aggregates
     */
    public Object getAggregatedValue(Session session, Object currValue) {
        // No accumulator means the group was empty.
        if (currValue == null) {
            // A VoltDB extension: APPROX_COUNT_DISTINCT of an empty group is 0,
            // just like COUNT; all other aggregates (SUM, AVG, MIN, MAX, ...)
            // yield SQL NULL.
            return opType == OpTypes.COUNT || opType == OpTypes.APPROX_COUNT_DISTINCT ? ValuePool.INTEGER_0 : null;
            /* disable 2 lines ...
            return opType == OpTypes.COUNT ? ValuePool.INTEGER_0 : null;
            ... disabled 2 lines */
            // End of VoltDB extension
        }
        // Otherwise currValue is the SetFunction accumulator; extract its result.
        return ((SetFunction) currValue).getValue();
    }
}
public class FileHdrScreen {
    /**
     * Set up all the screen fields.
     * <p>
     * Fields are attached to this view in three passes so that FILE_NOTES ends
     * up last: first every field from FILE_NAME up to (but excluding)
     * FILE_NOTES, then every field after FILE_NOTES, and finally FILE_NOTES
     * itself.
     */
    public void setupSFields() {
        Record query = this.getMainRecord();
        // Pass 1: fields from FILE_NAME up to, but not including, FILE_NOTES.
        for (int fieldSeq = query.getFieldSeq(FileHdr.FILE_NAME); fieldSeq < query.getFieldSeq(FileHdr.FILE_NOTES); fieldSeq++)
            query.getField(fieldSeq).setupFieldView(this); // Add this view to the list
        // Pass 2: fields after FILE_NOTES through the end of the record.
        for (int fieldSeq = query.getFieldSeq(FileHdr.FILE_NOTES) + 1; fieldSeq < query.getFieldCount(); fieldSeq++)
            query.getField(fieldSeq).setupFieldView(this); // Add this view to the list
        // Pass 3: FILE_NOTES last, so it is appended after all other fields.
        query.getField(FileHdr.FILE_NOTES).setupFieldView(this); // Add this view to the list
    }
}
public class MwRevisionDumpFileProcessor {
    /**
     * Processes current XML starting from a &lt;siteinfo&gt; start tag up to
     * the corresponding end tag. This method uses the current state of
     * {@link #xmlReader} and stores its results in according member fields
     * (sitename, namespaces, baseUrl). When the method has finished,
     * {@link #xmlReader} will be at the next element after the closing tag of
     * this block.
     *
     * @throws XMLStreamException if there was a problem reading the XML or if
     *                            the XML is malformed
     */
    void processXmlSiteinfo() throws XMLStreamException {
        this.xmlReader.next(); // skip current start tag
        while (this.xmlReader.hasNext()) {
            switch (this.xmlReader.getEventType()) {
                case XMLStreamConstants.START_ELEMENT:
                    switch (xmlReader.getLocalName()) {
                        case MwRevisionDumpFileProcessor.E_SITENAME:
                            this.sitename = this.xmlReader.getElementText();
                            break;
                        case MwRevisionDumpFileProcessor.E_NAMESPACE:
                            // The namespace id comes from the key attribute; the
                            // element text is the namespace's display name.
                            Integer namespaceKey = Integer.parseInt(this.xmlReader.getAttributeValue(null, MwRevisionDumpFileProcessor.A_NSKEY));
                            this.namespaces.put(namespaceKey, this.xmlReader.getElementText());
                            break;
                        case MwRevisionDumpFileProcessor.E_BASEURL:
                            this.baseUrl = this.xmlReader.getElementText();
                            break;
                    }
                    break;
                case XMLStreamConstants.END_ELEMENT:
                    // Return as soon as the matching </siteinfo> is reached.
                    if (MwRevisionDumpFileProcessor.E_SITEINFO.equals(this.xmlReader.getLocalName())) {
                        return;
                    }
                    break;
            }
            this.xmlReader.next();
        }
    }
}
public class WordNumberCollectorBundle { /** * Add word word number collector bundle . * @ param key the key * @ param word the word * @ return the word number collector bundle */ public WordNumberCollectorBundle addWord ( String key , String word ) { } }
wordCollector . add ( key , word ) ; return this ;
public class ClosureRewriteModule {
    /**
     * Rewrites top level var names from
     * "var foo; console.log(foo);" to
     * "var module$contents$Foo_foo; console.log(module$contents$Foo_foo);"
     *
     * @param t        the traversal supplying the current scope
     * @param nameNode the NAME node being considered for rewriting
     */
    private void maybeUpdateTopLevelName(NodeTraversal t, Node nameNode) {
        String name = nameNode.getString();
        // Only names declared at the top level of the current module qualify.
        if (!currentScript.isModule || !currentScript.topLevelNames.contains(name)) {
            return;
        }
        Var var = t.getScope().getVar(name);
        // If the name refers to a var that is not from the top level scope.
        if (var == null || var.getScope().getRootNode() != currentScript.rootNode) {
            // Then it shouldn't be renamed.
            return;
        }
        // If the name is part of a destructuring import, the import rewriting will take care of it
        if (var.getNameNode() == nameNode
            && nameNode.getParent().isStringKey()
            && nameNode.getGrandparent().isObjectPattern()) {
            Node destructuringLhsNode = nameNode.getGrandparent().getParent();
            if (isCallTo(destructuringLhsNode.getLastChild(), GOOG_REQUIRE)
                || isCallTo(destructuringLhsNode.getLastChild(), GOOG_REQUIRETYPE)) {
                return;
            }
        }
        // If the name is an alias for an imported namespace rewrite from
        // "new Foo;" to "new module$exports$Foo;"
        boolean nameIsAnAlias = currentScript.namesToInlineByAlias.containsKey(name);
        // Skip the declaration site itself (var.getNode() == nameNode).
        if (nameIsAnAlias && var.getNode() != nameNode) {
            maybeAddAliasToSymbolTable(nameNode, currentScript.legacyNamespace);
            String namespaceToInline = currentScript.namesToInlineByAlias.get(name);
            if (namespaceToInline.equals(currentScript.getBinaryNamespace())) {
                currentScript.hasCreatedExportObject = true;
            }
            safeSetMaybeQualifiedString(nameNode, namespaceToInline);
            // Make sure this action won't shadow a local variable.
            if (namespaceToInline.indexOf('.') != -1) {
                String firstQualifiedName = namespaceToInline.substring(0, namespaceToInline.indexOf('.'));
                Var shadowedVar = t.getScope().getVar(firstQualifiedName);
                // Globals and module-scope vars are not shadowing hazards here.
                if (shadowedVar == null || shadowedVar.isGlobal() || shadowedVar.getScope().isModuleScope()) {
                    return;
                }
                t.report(shadowedVar.getNode(), IMPORT_INLINING_SHADOWS_VAR, shadowedVar.getName(), namespaceToInline);
            }
            return;
        }
        // For non-import alias names rewrite from
        // "var foo; console.log(foo);" to
        // "var module$contents$Foo_foo; console.log(module$contents$Foo_foo);"
        safeSetString(nameNode, currentScript.contentsPrefix + name);
    }
}
public class AsymmetricCrypto {
    /**
     * Encrypts the given data with the selected key.
     *
     * @param data    the bytes to encrypt
     * @param keyType private or public key {@link KeyType}
     * @return the encrypted bytes
     * @throws CryptoException wrapping any underlying JCE failure
     */
    @Override
    public byte[] encrypt(byte[] data, KeyType keyType) {
        final Key key = getKeyByType(keyType);
        final int inputLen = data.length;
        // A negative configured block size means "encrypt in a single pass".
        final int maxBlockSize = this.encryptBlockSize < 0 ? inputLen : this.encryptBlockSize;
        // The shared Cipher instance is stateful; serialize access to it.
        lock.lock();
        try (ByteArrayOutputStream out = new ByteArrayOutputStream();) {
            cipher.init(Cipher.ENCRYPT_MODE, key);
            int offSet = 0;
            byte[] cache;
            // remaining length still to encrypt
            int remainLength = inputLen;
            // encrypt the data in segments of at most maxBlockSize bytes
            while (remainLength > 0) {
                cache = cipher.doFinal(data, offSet, Math.min(remainLength, maxBlockSize));
                out.write(cache, 0, cache.length);
                offSet += maxBlockSize;
                remainLength = inputLen - offSet;
            }
            return out.toByteArray();
        } catch (Exception e) {
            // Normalize any failure into the library's unchecked exception.
            throw new CryptoException(e);
        } finally {
            lock.unlock();
        }
    }
}
public class LazyObject { /** * Fields for an object are attached as children on the token representing * the object itself . This method finds the correct field for a given key * and returns its first child - the child being the value for that field . * This is a utility method used internally to extract field values . * @ param key the name of the desired field * @ return the first child of the matching field token if one exists * @ throws LazyException if the field does not exist */ private LazyNode getFieldToken ( String key ) throws LazyException { } }
LazyNode child = root . child ; while ( child != null ) { if ( keyMatch ( key , child ) ) { return child . child ; } child = child . next ; } throw new LazyException ( "Unknown field '" + key + "'" ) ;
public class CPDefinitionVirtualSettingUtil { /** * Returns the cp definition virtual setting where classNameId = & # 63 ; and classPK = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache . * @ param classNameId the class name ID * @ param classPK the class pk * @ param retrieveFromCache whether to retrieve from the finder cache * @ return the matching cp definition virtual setting , or < code > null < / code > if a matching cp definition virtual setting could not be found */ public static CPDefinitionVirtualSetting fetchByC_C ( long classNameId , long classPK , boolean retrieveFromCache ) { } }
return getPersistence ( ) . fetchByC_C ( classNameId , classPK , retrieveFromCache ) ;
public class Utils { /** * Converts the first character of the string to uppercase . Does NOT deal with surrogate pairs . */ public static String initCaps ( String in ) { } }
return in . length ( ) < 2 ? in . toUpperCase ( ) : in . substring ( 0 , 1 ) . toUpperCase ( ) + in . substring ( 1 ) ;
public class AbstractTrace { /** * ( non - Javadoc ) * @ see com . ibm . ws . objectManager . utils . Trace # debug ( java . lang . Object , java . lang . Class , java . lang . String , java . lang . Object ) */ public final void debug ( Object source , Class sourceClass , String methodName , Object object ) { } }
debug ( source , sourceClass , methodName , new Object [ ] { object } ) ;
public class IO {
    /**
     * {@inheritDoc}
     * <p>
     * Implemented by erasing both this IO and the applicative-wrapped function
     * to {@code Object}-parameterized views so they can be combined in an
     * internal {@code Compose} node. The casts are unchecked but only change
     * the static type, not the runtime values.
     */
    @Override
    public final <B> IO<B> zip(Applicative<Function<? super A, ? extends B>, IO<?>> appFn) {
        // Erase the element type of this IO for the Compose constructor.
        @SuppressWarnings("unchecked")
        IO<Object> source = (IO<Object>) this;
        // Double-cast through Object to view the applicative as a function-producing IO.
        @SuppressWarnings("unchecked")
        IO<Function<Object, Object>> zip = (IO<Function<Object, Object>>) (Object) appFn;
        // NOTE(review): a(...) presumably adapts the function-IO into the
        // Compose operand shape — confirm against Compose's constructor contract.
        return new Compose<>(source, a(zip));
    }
}
public class Predicate { /** * Gets the operator value for this Predicate . * @ return operator * The operator to use for filtering the data returned . * < span class = " constraint Required " > This field is required * and should not be { @ code null } . < / span > */ public com . google . api . ads . adwords . axis . v201809 . cm . PredicateOperator getOperator ( ) { } }
return operator ;
public class FontFidelityImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
switch ( featureID ) { case AfplibPackage . FONT_FIDELITY__STP_FNT_EX : return getStpFntEx ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
public class InternalXtextParser {
    /**
     * Entry rule for AbstractMetamodelDeclaration (generated ANTLR parser code;
     * do not hand-edit).
     * InternalXtext.g:128:1: entryRuleAbstractMetamodelDeclaration : ruleAbstractMetamodelDeclaration EOF ;
     */
    public final void entryRuleAbstractMetamodelDeclaration() throws RecognitionException {
        try {
            // InternalXtext.g:129:1: ( ruleAbstractMetamodelDeclaration EOF )
            // InternalXtext.g:130:1: ruleAbstractMetamodelDeclaration EOF
            {
                before(grammarAccess.getAbstractMetamodelDeclarationRule());
                pushFollow(FollowSets000.FOLLOW_1);
                ruleAbstractMetamodelDeclaration();
                state._fsp--;
                after(grammarAccess.getAbstractMetamodelDeclarationRule());
                // The entry rule must consume the whole input up to EOF.
                match(input, EOF, FollowSets000.FOLLOW_2);
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error reporting + recovery; parsing continues.
            reportError(re);
            recover(input, re);
        } finally {
        }
        return;
    }
}
public class Solo { /** * Checks if a CheckBox displaying the specified text is checked . * @ param text the text that the { @ link CheckBox } displays , specified as a regular expression * @ return { @ code true } if a { @ link CheckBox } displaying the specified text is checked and { @ code false } if it is not checked */ public boolean isCheckBoxChecked ( String text ) { } }
if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "isCheckBoxChecked(\"" + text + "\")" ) ; } return checker . isButtonChecked ( CheckBox . class , text ) ;
public class CDDImpl {
    /**
     * Reflective feature setter for this EMF object (generated code; do not
     * hand-edit).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.CDD__XOC_BASE:
                setXocBase((Integer) newValue);
                return;
            case AfplibPackage.CDD__YOC_BASE:
                setYocBase((Integer) newValue);
                return;
            case AfplibPackage.CDD__XOC_UNITS:
                setXocUnits((Integer) newValue);
                return;
            case AfplibPackage.CDD__YOC_UNITS:
                setYocUnits((Integer) newValue);
                return;
            case AfplibPackage.CDD__XOC_SIZE:
                setXocSize((Integer) newValue);
                return;
            case AfplibPackage.CDD__YOC_SIZE:
                setYocSize((Integer) newValue);
                return;
            case AfplibPackage.CDD__TRIPLETS:
                // The triplets list is replaced wholesale: clear then add all.
                getTriplets().clear();
                getTriplets().addAll((Collection<? extends Triplet>) newValue);
                return;
        }
        // Unknown features are delegated to the superclass.
        super.eSet(featureID, newValue);
    }
}
public class XMLHelper { /** * Get the first direct child element of the passed element . * @ param aStartNode * The element to start searching . May be < code > null < / code > . * @ return < code > null < / code > if the passed element does not have any direct * child element . */ @ Nullable public static Element getFirstChildElement ( @ Nullable final Node aStartNode ) { } }
if ( aStartNode == null ) return null ; return NodeListIterator . createChildNodeIterator ( aStartNode ) . findFirstMapped ( filterNodeIsElement ( ) , x -> ( Element ) x ) ;
public class ImgCompressUtils {
    /**
     * Encodes an image as JPEG.
     *
     * @param desFile destination path for the compressed image, including the file name
     * @param desImg  the source image to encode
     * @param quality encoding quality in [0.0, 1.0], or null to use codec defaults
     */
    private static void encodeImg(String desFile, BufferedImage desImg, Float quality) {
        FileOutputStream out = null;
        try {
            // Validate the quality range before touching the filesystem.
            if (quality != null) {
                if (quality > 1.0 || quality < 0.0) {
                    // Runtime message kept as-is (user-facing, localized).
                    throw new Exception("quality参数指定值不正确");
                }
            }
            out = new FileOutputStream(desFile);
            // Encode the image in JPEG format.
            JPEGImageEncoder encoder = JPEGCodec.createJPEGEncoder(out);
            if (quality != null) {
                // Encoding parameters derived from the source image.
                JPEGEncodeParam jep = JPEGCodec.getDefaultJPEGEncodeParam(desImg);
                // Apply the requested compression quality.
                jep.setQuality(quality, true);
                // Encode and write out.
                encoder.encode(desImg, jep);
            } else {
                // Encode and write out with default parameters.
                encoder.encode(desImg);
            }
        } catch (Exception e) {
            // Surface any failure as an unchecked exception.
            throw new RuntimeException(e);
        } finally {
            // Always close the output stream, even on failure.
            CommonUtils.closeIOStream(null, out);
        }
    }
}
public class RPC { /** * Construct a server for a protocol implementation instance listening on a * port and address . */ public static Server getServer ( final Object instance , final String bindAddress , final int port , final int numHandlers ) throws IOException { } }
return new Server ( instance , bindAddress , port , numHandlers ) ;
public class StandardResponsesApi { /** * Remove a Standard Response from Favorites * @ param id id of the Standard Response to remove from Favorites ( required ) * @ param removeFavoritesData Request parameters . ( optional ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse deleteStandardResponseFavorite ( String id , RemoveFavoritesData removeFavoritesData ) throws ApiException { } }
ApiResponse < ApiSuccessResponse > resp = deleteStandardResponseFavoriteWithHttpInfo ( id , removeFavoritesData ) ; return resp . getData ( ) ;
public class Nfs3 { /** * / * ( non - Javadoc ) * @ see com . emc . ecs . nfsclient . nfs . Nfs # wrapped _ sendRmdir ( com . emc . ecs . nfsclient . nfs . NfsRmdirRequest ) */ public Nfs3RmdirResponse wrapped_sendRmdir ( NfsRmdirRequest request ) throws IOException { } }
RpcResponseHandler < Nfs3RmdirResponse > responseHandler = new NfsResponseHandler < Nfs3RmdirResponse > ( ) { /* ( non - Javadoc ) * @ see com . emc . ecs . nfsclient . rpc . RpcResponseHandler # makeNewResponse ( ) */ protected Nfs3RmdirResponse makeNewResponse ( ) { return new Nfs3RmdirResponse ( ) ; } } ; _rpcWrapper . callRpcWrapped ( request , responseHandler ) ; return responseHandler . getResponse ( ) ;
public class ScanBenchmark { /** * / * Just get everything */ private static Druids . ScanQueryBuilder basicA ( final BenchmarkSchemaInfo basicSchema ) { } }
final QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec ( Collections . singletonList ( basicSchema . getDataInterval ( ) ) ) ; return Druids . newScanQueryBuilder ( ) . dataSource ( "blah" ) . intervals ( intervalSpec ) . order ( ordering ) ;
public class CommerceUserSegmentCriterionPersistenceImpl { /** * Removes all the commerce user segment criterions where commerceUserSegmentEntryId = & # 63 ; from the database . * @ param commerceUserSegmentEntryId the commerce user segment entry ID */ @ Override public void removeByCommerceUserSegmentEntryId ( long commerceUserSegmentEntryId ) { } }
for ( CommerceUserSegmentCriterion commerceUserSegmentCriterion : findByCommerceUserSegmentEntryId ( commerceUserSegmentEntryId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceUserSegmentCriterion ) ; }
public class MPIO { /** * Test whether or not a particular ME is reachable . * @ param me The ME to test . * @ return True if TRM has a path to this ME , false otherwise . */ public boolean isMEReachable ( SIBUuid8 meUuid ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "isMEReachable" , new Object [ ] { this , meUuid } ) ; boolean result = ( findMPConnection ( meUuid ) != null ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "isMEReachable" , Boolean . valueOf ( result ) ) ; return result ;
public class NFSubstitution {
    /**
     * Parses the description, creates the right kind of substitution,
     * and initializes it based on the description.
     *
     * @param pos             The substitution's position in the rule text of the rule that owns it.
     * @param rule            The rule containing this substitution
     * @param rulePredecessor The rule preceding the one that contains this substitution in the
     *                        rule set's rule list (this is used only for &gt;&gt;&gt; substitutions).
     * @param ruleSet         The rule set containing the rule containing this substitution
     * @param formatter       The RuleBasedNumberFormat that ultimately owns this substitution
     * @param description     The description to parse to build the substitution (this is just the
     *                        substring of the rule's description containing the substitution token itself)
     * @return A new substitution constructed according to the description, or null for an empty description
     */
    public static NFSubstitution makeSubstitution(int pos, NFRule rule, NFRule rulePredecessor, NFRuleSet ruleSet, RuleBasedNumberFormat formatter, String description) {
        // if the description is empty, return a NullSubstitution
        if (description.length() == 0) {
            return null;
        }
        // The first character of the token selects the substitution family.
        switch (description.charAt(0)) {
            case '<':
                if (rule.getBaseValue() == NFRule.NEGATIVE_NUMBER_RULE) {
                    // throw an exception if the rule is a negative number rule
                    ///CLOVER:OFF
                    // If you look at the call hierarchy of this method, the rule would
                    // never be directly modified by the user and therefore makes the
                    // following pointless unless the user changes the ruleset.
                    throw new IllegalArgumentException("<< not allowed in negative-number rule");
                    ///CLOVER:ON
                } else if (rule.getBaseValue() == NFRule.IMPROPER_FRACTION_RULE
                        || rule.getBaseValue() == NFRule.PROPER_FRACTION_RULE
                        || rule.getBaseValue() == NFRule.MASTER_RULE) {
                    // if the rule is a fraction rule, return an IntegralPartSubstitution
                    return new IntegralPartSubstitution(pos, ruleSet, description);
                } else if (ruleSet.isFractionSet()) {
                    // if the rule set containing the rule is a fraction
                    // rule set, return a NumeratorSubstitution
                    return new NumeratorSubstitution(pos, rule.getBaseValue(), formatter.getDefaultRuleSet(), description);
                } else {
                    // otherwise, return a MultiplierSubstitution
                    return new MultiplierSubstitution(pos, rule, ruleSet, description);
                }
            case '>':
                if (rule.getBaseValue() == NFRule.NEGATIVE_NUMBER_RULE) {
                    // if the rule is a negative-number rule, return
                    // an AbsoluteValueSubstitution
                    return new AbsoluteValueSubstitution(pos, ruleSet, description);
                } else if (rule.getBaseValue() == NFRule.IMPROPER_FRACTION_RULE
                        || rule.getBaseValue() == NFRule.PROPER_FRACTION_RULE
                        || rule.getBaseValue() == NFRule.MASTER_RULE) {
                    // if the rule is a fraction rule, return a
                    // FractionalPartSubstitution
                    return new FractionalPartSubstitution(pos, ruleSet, description);
                } else if (ruleSet.isFractionSet()) {
                    // if the rule set owning the rule is a fraction rule set,
                    // throw an exception
                    ///CLOVER:OFF
                    // If you look at the call hierarchy of this method, the rule would
                    // never be directly modified by the user and therefore makes the
                    // following pointless unless the user changes the ruleset.
                    throw new IllegalArgumentException(">> not allowed in fraction rule set");
                    ///CLOVER:ON
                } else {
                    // otherwise, return a ModulusSubstitution
                    return new ModulusSubstitution(pos, rule, rulePredecessor, ruleSet, description);
                }
            case '=':
                return new SameValueSubstitution(pos, ruleSet, description);
            default:
                // and if it's anything else, throw an exception
                ///CLOVER:OFF
                // If you look at the call hierarchy of this method, the rule would
                // never be directly modified by the user and therefore makes the
                // following pointless unless the user changes the ruleset.
                throw new IllegalArgumentException("Illegal substitution character");
                ///CLOVER:ON
        }
    }
}
public class BasicValidator { /** * the Bond tests */ private ValidationReport validateStereoChemistry ( IBond bond ) { } }
ValidationReport report = new ValidationReport ( ) ; ValidationTest bondStereo = new ValidationTest ( bond , "Defining stereochemistry on bonds is not safe." , "Use atom based stereochemistry." ) ; if ( bond . getStereo ( ) != IBond . Stereo . NONE ) { report . addWarning ( bondStereo ) ; } else { report . addOK ( bondStereo ) ; } return report ;
public class FilesystemPath { /** * Essentially chroot */ public PathImpl createRoot ( SchemeMap schemeMap ) { } }
FilesystemPath restriction = ( FilesystemPath ) copy ( ) ; restriction . _schemeMap = schemeMap ; restriction . _root = this ; restriction . _pathname = "/" ; restriction . _userPath = "/" ; return restriction ;
public class HadoopRandomIndexing { /** * Creates an { @ link IntegerVector } of the kind specified by the user . */ private IntegerVector createSemanticVector ( ) { } }
IntegerVector v = ( useSparseSemantics ) ? new CompactSparseIntegerVector ( vectorLength ) : new DenseIntVector ( vectorLength ) ; return v ;
public class RequestExpirationScheduler { /** * Aborts the scheduled request . If force is true , it will abort even if the request is not completed * @ param requestId , unique identifier of the request * @ param force , force abort */ public void abortScheduling ( String requestId , boolean force ) { } }
if ( trace ) { log . tracef ( "Request[%s] abort scheduling" , requestId ) ; } ScheduledRequest scheduledRequest = scheduledRequests . get ( requestId ) ; if ( scheduledRequest != null && ( scheduledRequest . request . isDone ( ) || force ) ) { scheduledRequest . scheduledFuture . cancel ( false ) ; scheduledRequests . remove ( requestId ) ; }
public class SparkUtils { /** * Write an object to HDFS ( or local ) using default Java object serialization * @ param path Path to write the object to * @ param toWrite Object to write * @ param sc Spark context */ public static void writeObjectToFile ( String path , Object toWrite , SparkContext sc ) throws IOException { } }
writeObjectToFile ( path , toWrite , sc . hadoopConfiguration ( ) ) ;
public class LogImpl { /** * Set if debugging is on or off . * @ param debug */ public synchronized void setDebug ( boolean debug ) { } }
boolean oldDebug = _debugOn ; if ( _debugOn && ! debug ) this . message ( DEBUG , "DEBUG OFF" ) ; _debugOn = debug ; if ( ! oldDebug && debug ) this . message ( DEBUG , "DEBUG ON" ) ;
public class MaterialComboBox {
    /**
     * Set directly all the values that will be stored into
     * combobox and build options into it.
     *
     * @param values     the values to select
     * @param fireEvents whether change events should be propagated to listeners
     */
    public void setValues(List<T> values, boolean fireEvents) {
        // Convert each value to its option key via the key factory.
        String[] stringValues = new String[values.size()];
        for (int i = 0; i < values.size(); i++) {
            stringValues[i] = keyFactory.generateKey(values.get(i));
        }
        // Suppress change events during the update unless the caller asked for
        // them, then push the keys into the underlying listbox via jQuery and
        // trigger its change handler with the current selected index.
        suppressChangeEvent = !fireEvents;
        $(listbox.getElement()).val(stringValues).trigger("change", selectedIndex);
        suppressChangeEvent = false;
    }
}
public class AbstractUserArgumentProcessor {
    /**
     * Do a DB search with the input file against representative PDB domains.
     *
     * @param cache      the atom cache (currently unused in this method)
     * @param searchFile path to the query structure file (PDB format)
     * @param outputFile where search results are written
     * @param useNrCPUs  number of CPUs to use for the search
     * @param params     startup parameters (domain-split flag is read)
     * @throws ConfigurationException if the search file cannot be parsed as PDB
     */
    private void runDbSearch(AtomCache cache, String searchFile, String outputFile, int useNrCPUs, StartupParameters params) throws ConfigurationException {
        System.out.println("will use " + useNrCPUs + " CPUs.");
        PDBFileReader reader = new PDBFileReader();
        Structure structure1 = null;
        try {
            structure1 = reader.getStructure(searchFile);
        } catch (IOException e) {
            // Surface parse failures as a configuration problem for the caller.
            throw new ConfigurationException("could not parse as PDB file: " + searchFile);
        }
        File searchF = new File(searchFile);
        // The query structure is registered under a fixed synthetic name.
        String name1 = "CUSTOM";
        StructureAlignment algorithm = getAlgorithm();
        MultiThreadedDBSearch dbSearch = new MultiThreadedDBSearch(name1, structure1, outputFile, algorithm, useNrCPUs, params.isDomainSplit());
        dbSearch.setCustomFile1(searchF.getAbsolutePath());
        // NOTE(review): run() appears to execute the search synchronously here
        // (called directly, not on a new Thread) — confirm MultiThreadedDBSearch's contract.
        dbSearch.run();
    }
}
public class ListTemplateElementBuilder { /** * Adds a button which redirects to an URL when clicked to the current * { @ link ListTemplateElement } . There can be at most 3 buttons per element . * @ param title * the button label . * @ param url * the URL to whom redirect when clicked . * @ return this builder . */ public ListTemplateElementBuilder addUrlButton ( String title , String url ) { } }
Button button = ButtonFactory . createUrlButton ( title , url ) ; this . element . addButton ( button ) ; return this ;
public class XmlHttpProxyServlet { /** * / * Allow for a EL style replacements in the serviceURL * The constant REMOTE _ USER will replace the contents of $ { REMOTE _ USER } * with the return value of request . getRemoteUserver ( ) if it is not null * otherwise the $ { REMOTE _ USER } is replaced with a blank . * If you use $ { session . somekey } the $ { session . somekey } will be replaced with * the String value of the session varialble somekey or blank if the session key * does not exist . */ private String processURL ( String url , HttpServletRequest req , HttpServletResponse res ) { } }
String serviceURL = url ; int start = url . indexOf ( "${" ) ; int end = url . indexOf ( "}" , start ) ; if ( end != - 1 ) { String prop = url . substring ( start + 2 , end ) . trim ( ) ; // no matter what we will remove the $ { } // default to blank like the JSP EL String replace = "" ; if ( REMOTE_USER . equals ( prop ) ) { if ( req . getRemoteUser ( ) != null ) replace = req . getRemoteUser ( ) ; } if ( prop . toLowerCase ( ) . startsWith ( "session." ) ) { String sessionKey = prop . substring ( "session." . length ( ) , prop . length ( ) ) ; if ( req . getSession ( ) . getAttribute ( sessionKey ) != null ) { // force to a string replace = req . getSession ( ) . getAttribute ( sessionKey ) . toString ( ) ; } } serviceURL = serviceURL . substring ( 0 , start ) + replace + serviceURL . substring ( end + 1 , serviceURL . length ( ) ) ; } // call recursively to process more than one instance of a $ { in the serviceURL if ( serviceURL . indexOf ( "${" ) != - 1 ) serviceURL = processURL ( serviceURL , req , res ) ; return serviceURL ;
public class Manager { /** * Reads all registered output parameters from specified statement . * @ param connection an SQL database connection . * @ param statement an SQL callable statement . * @ throws SQLException if error occurs while setting up parameters . * @ see Connection * @ see Statement * @ since v1.0 */ public void parseAll ( Connection connection , Statement statement ) throws SQLException { } }
for ( Object key : mappings . keySet ( ) ) { Parameter parameter = mappings . get ( key ) ; if ( parameter . getOutput ( ) != null ) { Object output = statement . read ( key ) ; Converter decoder = parameter . getDecoder ( ) ; if ( decoder != null ) { output = decoder . perform ( connection , output ) ; } parameter . getOutput ( ) . setValue ( output ) ; } }
public class TridiagonalDecompositionHouseholder_DDRM {
    /**
     * Extracts the tridiagonal matrix found in the decomposition.
     *
     * @param T If not null then the results will be stored here. Otherwise a new matrix will be created.
     * @return The extracted T matrix.
     */
    @Override
    public DMatrixRMaj getT(DMatrixRMaj T) {
        // Ensure T is an N x N zeroed matrix (allocating when null).
        T = UtilDecompositons_DDRM.checkZeros(T, N, N);
        T.data[0] = QT.data[0];
        // Copy the diagonal and mirror the off-diagonal into both halves,
        // since T is symmetric.
        for (int i = 1; i < N; i++) {
            T.set(i, i, QT.get(i, i));
            double a = QT.get(i - 1, i);
            T.set(i - 1, i, a);
            T.set(i, i - 1, a);
        }
        if (N > 1) {
            // Final diagonal entry and last sub-diagonal entry copied directly
            // via row-major indexing into the backing arrays.
            T.data[(N - 1) * N + N - 1] = QT.data[(N - 1) * N + N - 1];
            T.data[(N - 1) * N + N - 2] = QT.data[(N - 2) * N + N - 1];
        }
        return T;
    }
}
public class Redwood { /** * Captures System . out and System . err and redirects them * to Redwood logging . * @ param captureOut True is System . out should be captured * @ param captureErr True if System . err should be captured */ protected static void captureSystemStreams ( boolean captureOut , boolean captureErr ) { } }
if ( captureOut ) { System . setOut ( new RedwoodPrintStream ( STDOUT , realSysOut ) ) ; } if ( captureErr ) { System . setErr ( new RedwoodPrintStream ( STDERR , realSysErr ) ) ; }
public class jazz_license {
    /**
     * <pre>
     * Converts API response of bulk operation into object and returns the object array in case of get request.
     * </pre>
     *
     * @param service  the NITRO service used for deserialization and session handling
     * @param response raw response payload
     * @return the unwrapped jazz_license resources
     * @throws Exception (as nitro_exception) when the response carries a non-zero error code
     */
    protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception {
        // Deserialize the raw payload into the typed bulk-response wrapper.
        jazz_license_responses result = (jazz_license_responses) service.get_payload_formatter().string_to_resource(jazz_license_responses.class, response);
        if (result.errorcode != 0) {
            // A dead session is cleared locally before the error is surfaced.
            if (result.errorcode == SESSION_NOT_EXISTS)
                service.clear_session();
            throw new nitro_exception(result.message, result.errorcode, (base_response[]) result.jazz_license_response_array);
        }
        // Unwrap each per-item response; each entry carries exactly one resource.
        jazz_license[] result_jazz_license = new jazz_license[result.jazz_license_response_array.length];
        for (int i = 0; i < result.jazz_license_response_array.length; i++) {
            result_jazz_license[i] = result.jazz_license_response_array[i].jazz_license[0];
        }
        return result_jazz_license;
    }
}
public class EhCacheService { /** * Gets an entry from the cache . * @ param key Item key . * @ return the stored object , { @ literal null } if none of expired . */ @ Override public Object get ( String key ) { } }
Element element = cache . get ( key ) ; if ( element != null ) { return element . getObjectValue ( ) ; } return null ;
public class DateUtils { /** * Roll the java . util . Date forward or backward . * @ param startDate - The start date * @ param period Calendar . YEAR etc * @ param amount - Negative to rollbackwards . */ public static java . util . Date rollDateTime ( java . util . Date startDate , int period , int amount ) { } }
GregorianCalendar gc = new GregorianCalendar ( ) ; gc . setTime ( startDate ) ; gc . add ( period , amount ) ; return new java . util . Date ( gc . getTime ( ) . getTime ( ) ) ;
public class SimpleDocTreeVisitor {
    /**
     * {@inheritDoc} This implementation calls {@code defaultAction}.
     *
     * @param node {@inheritDoc}
     * @param p {@inheritDoc}
     * @return the result of {@code defaultAction}
     */
    @Override
    public R visitIndex (IndexTree node, P p) {
        // Simple visitor: index entries get no special handling, delegate to the default.
        return defaultAction (node, p);
    }
}
public class DoubleParameter { /** * Set the minimum value . The minimum value is an acceptable value if and * only if inclusive is set to true . * @ param minimumValue the minimum value * @ param inclusive whether the minimum value is a valid value * @ return this */ public DoubleParameter setMinimumValue ( double minimumValue , boolean inclusive ) { } }
if ( hasDefaultValue ) { if ( inclusive ) { Util . checkParameter ( minimumValue <= defaultValue , "Minimum value (" + minimumValue + ") must be less than or equal to default (" + defaultValue + ")" ) ; } else { Util . checkParameter ( minimumValue < defaultValue , "Minimum value (" + minimumValue + ") must be less than default (" + defaultValue + ")" ) ; } } else if ( hasMaximumValue ) { if ( inclusive && maximumValueInclusive ) { Util . checkParameter ( minimumValue <= maximumValue , "Minimum value (" + minimumValue + ") must be less than or equal to maximum (" + maximumValue + ")" ) ; } else { Util . checkParameter ( minimumValue < maximumValue , "Minimum value (" + minimumValue + ") must be less than maximum (" + maximumValue + ")" ) ; } } this . hasMinimumValue = true ; this . minimumValue = minimumValue ; this . minimumValueInclusive = inclusive ; return this ;
public class Encryptor { /** * < p > Generates an initialization vector . < / p > */ private byte [ ] generateIV ( ) { } }
byte [ ] iv = new byte [ ivLength ] ; SecureRandom random = new SecureRandom ( ) ; random . nextBytes ( iv ) ; return iv ;
public class AbstractDockerMojo {
    /**
     * Attempt to load a GCR compatible RegistryAuthSupplier based on a few conditions:
     * <ol>
     * <li>First check to see if the environment variable DOCKER_GOOGLE_CREDENTIALS is set and points
     * to a readable file</li>
     * <li>Otherwise check if the Google Application Default Credentials can be loaded</li>
     * </ol>
     * Note that we use a special environment variable of our own in addition to any environment
     * variable that the ADC loading uses (GOOGLE_APPLICATION_CREDENTIALS) in case there is a need for
     * the user to use the latter env var for some other purpose in their build.
     *
     * @return a GCR RegistryAuthSupplier, or null
     * @throws MojoExecutionException if an IOException occurs while loading the explicitly-requested
     *         credentials
     */
    private RegistryAuthSupplier googleContainerRegistryAuthSupplier () throws MojoExecutionException {
        GoogleCredentials credentials = null;
        // Step 1: explicit credentials file referenced by our dedicated env var.
        final String googleCredentialsPath = System.getenv ("DOCKER_GOOGLE_CREDENTIALS");
        if (googleCredentialsPath != null) {
            final File file = new File (googleCredentialsPath);
            if (file.exists ()) {
                try {
                    try (FileInputStream inputStream = new FileInputStream (file)) {
                        credentials = GoogleCredentials.fromStream (inputStream);
                        getLog ().info ("Using Google credentials from file: " + file.getAbsolutePath ());
                    }
                } catch (IOException ex) {
                    // Explicitly requested credentials that fail to load are a hard error.
                    throw new MojoExecutionException ("Cannot load credentials referenced by " + "DOCKER_GOOGLE_CREDENTIALS environment variable", ex);
                }
            }
        }
        // use the ADC last
        if (credentials == null) {
            try {
                credentials = GoogleCredentials.getApplicationDefault ();
                getLog ().info ("Using Google application default credentials");
            } catch (IOException ex) {
                // No GCP default credentials available — this fallback is best-effort, so only debug-log.
                getLog ().debug ("Failed to load Google application default credentials", ex);
            }
        }
        // Neither source produced credentials: signal "no GCR auth" to the caller.
        if (credentials == null) {
            return null;
        }
        return ContainerRegistryAuthSupplier.forCredentials (credentials).build ();
    }
}
public class PushOptions { /** * Sets the sort value for the update * @ param field the field to sort by * @ param direction the direction of the sort * @ return this */ public PushOptions sort ( final String field , final int direction ) { } }
if ( sort != null ) { throw new IllegalStateException ( "sortDocument can not be set if sort already is" ) ; } if ( sortDocument == null ) { sortDocument = new BasicDBObject ( ) ; } sortDocument . put ( field , direction ) ; return this ;
public class Timecode { /** * Breaks a string on any non - numeric character and returns the index token , zero indexed */ private int getToken ( String inString , int index ) throws Timecode . TimecodeException { } }
inString = inString . trim ( ) ; String valid = "0123456789" ; String token = "" ; int count = 0 ; for ( int i = 0 ; i < inString . length ( ) ; i ++ ) { char current = inString . charAt ( i ) ; if ( valid . indexOf ( current ) > - 1 ) { token += current ; } else { count ++ ; if ( count > index ) break ; // Found the token . token = "" ; // Start reading the next token } } if ( count < index || token . equals ( "" ) ) throw new Timecode . TimecodeException ( "Malformed timecode '" + inString + "', can't get index=" + index ) ; try { return Integer . parseInt ( token ) ; } catch ( NumberFormatException ex ) { throw new Timecode . TimecodeException ( "Malformed timecode '" + inString + "', '" + token + "' is not an integer" ) ; }
public class Matrix4d { /** * Set the values of this matrix by reading 16 float values from the given { @ link ByteBuffer } in column - major order , * starting at its current position . * The ByteBuffer is expected to contain the values in column - major order . * The position of the ByteBuffer will not be changed by this method . * @ param buffer * the ByteBuffer to read the matrix values from in column - major order * @ return this */ public Matrix4d setFloats ( ByteBuffer buffer ) { } }
MemUtil . INSTANCE . getf ( this , buffer . position ( ) , buffer ) ; properties = 0 ; return this ;
public class DefaultDOManager { /** * Checks the object registry for the given object . */ @ Override public boolean objectExists ( String pid ) throws StorageDeviceException { } }
boolean registered = objectExistsInRegistry ( pid ) ; boolean exists = false ; if ( ! registered && m_checkableStore ) { try { exists = ( ( ICheckable ) m_permanentStore ) . objectExists ( pid ) ; } catch ( LowlevelStorageException e ) { throw new StorageDeviceException ( e . getMessage ( ) , e ) ; } } if ( exists && ! registered ) { logger . warn ( "{} was not in the registry, but appears to be in store." + " Registry db may be in inconsistent state." , pid ) ; } return registered || exists ;
public class JDBCXAResource { /** * Stub . See implementation comment in the method for why this is * not implemented yet . * @ return false . */ public boolean isSameRM ( XAResource xares ) throws XAException { } }
if ( ! ( xares instanceof JDBCXAResource ) ) { return false ; } return xaDataSource == ( ( JDBCXAResource ) xares ) . getXADataSource ( ) ;
public class Aggregate { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > calculate the standard deviation for a given value over a group ; * < br / > uses a standard two - pass method , with N - 1 as the denominator , * and should be used when taking a sample of the population for an unbiased estimate . < / i > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > e . g . . . . aggregate ( ) . < b > stdev ( n . property ( " age " ) ) < / b > < / i > < / div > * < br / > */ public RElement < RElement < ? > > stdev ( JcProperty property ) { } }
ReturnExpression rx = getReturnExpression ( ) ; ReturnAggregate ra = ( ReturnAggregate ) rx . getReturnValue ( ) ; ra . setType ( AggregateFunctionType . STDEV ) ; ra . setArgument ( property ) ; RElement < RElement < ? > > ret = new RElement < RElement < ? > > ( rx ) ; return ret ;
public class BNFHeadersImpl {
    /**
     * Save a reference to a new buffer with header parse information. This is
     * not part of the "created list" and will not be released by this message
     * class.
     *
     * @param buffer the buffer to track
     */
    public void addParseBuffer (WsByteBuffer buffer) {
        // Advance to the slot this buffer will occupy.
        final int slot = ++this.parseIndex;
        if (this.parseBuffers == null) {
            // First buffer: lazily allocate both tracking arrays, positions unset.
            this.parseBuffers = new WsByteBuffer[BUFFERS_INITIAL_SIZE];
            this.parseBuffersStartPos = new int[BUFFERS_INITIAL_SIZE];
            for (int pos = 0; pos < BUFFERS_INITIAL_SIZE; pos++) {
                this.parseBuffersStartPos[pos] = HeaderStorage.NOTSET;
            }
        } else if (slot == this.parseBuffers.length) {
            // Arrays are full: grow both in lock step, preserving existing entries.
            final int newSize = slot + BUFFERS_MIN_GROWTH;
            if (TraceComponent.isAnyTracingEnabled () && tc.isDebugEnabled ()) {
                Tr.debug (tc, "Increasing parse buffer array size to " + newSize);
            }
            final WsByteBuffer[] grownBuffers = new WsByteBuffer[newSize];
            System.arraycopy (this.parseBuffers, 0, grownBuffers, 0, slot);
            this.parseBuffers = grownBuffers;
            final int[] grownPositions = new int[newSize];
            System.arraycopy (this.parseBuffersStartPos, 0, grownPositions, 0, slot);
            // Newly added position slots start out unset.
            for (int pos = slot; pos < newSize; pos++) {
                grownPositions[pos] = HeaderStorage.NOTSET;
            }
            this.parseBuffersStartPos = grownPositions;
        }
        this.parseBuffers[slot] = buffer;
    }
}
public class StencilEngine { /** * Sets the active global scopes * @ param globalScopes New active global scopes */ public void setGlobalScopes ( Iterable < GlobalScope > globalScopes ) { } }
this . globalScopes = Lists . newArrayList ( Iterables . concat ( globalScopes , serviceGlobalScopes ) ) ;
public class LoadBalancerLoadBalancingRulesInner { /** * Gets all the load balancing rules in a load balancer . * @ param resourceGroupName The name of the resource group . * @ param loadBalancerName The name of the load balancer . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; LoadBalancingRuleInner & gt ; object */ public Observable < Page < LoadBalancingRuleInner > > listAsync ( final String resourceGroupName , final String loadBalancerName ) { } }
return listWithServiceResponseAsync ( resourceGroupName , loadBalancerName ) . map ( new Func1 < ServiceResponse < Page < LoadBalancingRuleInner > > , Page < LoadBalancingRuleInner > > ( ) { @ Override public Page < LoadBalancingRuleInner > call ( ServiceResponse < Page < LoadBalancingRuleInner > > response ) { return response . body ( ) ; } } ) ;
public class ExceptionUtils { /** * < p > Produces a < code > List < / code > of stack frames - the message * is not included . Only the trace of the specified exception is * returned , any caused by trace is stripped . < / p > * < p > This works in most cases - it will only fail if the exception * message contains a line that starts with : * < code > & quot ; & nbsp ; & nbsp ; & nbsp ; at & quot ; . < / code > < / p > * @ param t is any throwable * @ return List of stack frames */ @ GwtIncompatible ( "incompatible method" ) static List < String > getStackFrameList ( final Throwable t ) { } }
final String stackTrace = getStackTrace ( t ) ; final String linebreak = System . lineSeparator ( ) ; final StringTokenizer frames = new StringTokenizer ( stackTrace , linebreak ) ; final List < String > list = new ArrayList < > ( ) ; boolean traceStarted = false ; while ( frames . hasMoreTokens ( ) ) { final String token = frames . nextToken ( ) ; // Determine if the line starts with < whitespace > at final int at = token . indexOf ( "at" ) ; if ( at != - 1 && token . substring ( 0 , at ) . trim ( ) . isEmpty ( ) ) { traceStarted = true ; list . add ( token ) ; } else if ( traceStarted ) { break ; } } return list ;
public class JcrManagedConnection { /** * Gets the metadata information for this connection ' s underlying EIS resource manager instance . * @ return ManagedConnectionMetaData instance * @ throws ResourceException generic exception if operation fails */ @ Override public ManagedConnectionMetaData getMetaData ( ) throws ResourceException { } }
try { return new JcrManagedConnectionMetaData ( mcf . getRepository ( ) , session ) ; } catch ( Exception e ) { throw new ResourceException ( e ) ; }
public class AnalyzeLocal { /** * Analyze the data quality of sequence data - provides a report on missing values , values that don ' t comply with schema , etc * @ param schema Schema for data * @ param data Data to analyze * @ return DataQualityAnalysis object */ public static DataQualityAnalysis analyzeQualitySequence ( Schema schema , SequenceRecordReader data ) { } }
int nColumns = schema . numColumns ( ) ; List < QualityAnalysisState > states = new ArrayList < > ( ) ; QualityAnalysisAddFunction addFn = new QualityAnalysisAddFunction ( schema ) ; while ( data . hasNext ( ) ) { List < List < Writable > > seq = data . sequenceRecord ( ) ; for ( List < Writable > step : seq ) { states = addFn . apply ( states , step ) ; } } List < ColumnQuality > list = new ArrayList < > ( nColumns ) ; for ( QualityAnalysisState qualityState : states ) { list . add ( qualityState . getColumnQuality ( ) ) ; } return new DataQualityAnalysis ( schema , list ) ;
public class TaskExecutor {
    /**
     * Submits a task described by the given deployment descriptor for execution on this
     * TaskExecutor. Validates the owning JobManager and slot allocation, re-inflates any
     * offloaded/serialized payloads, builds the {@code Task}, registers it in the slot
     * table and starts its thread. All rejections are returned as a future completed
     * exceptionally with a {@link TaskSubmissionException}.
     *
     * @param tdd         descriptor of the task to deploy
     * @param jobMasterId fencing id of the JobMaster issuing the submission
     * @param timeout     RPC timeout (unused inside this method body)
     * @return future acknowledging the submission, or completed exceptionally on rejection
     */
    @Override
    public CompletableFuture<Acknowledge> submitTask (TaskDeploymentDescriptor tdd, JobMasterId jobMasterId, Time timeout) {
        try {
            final JobID jobId = tdd.getJobId ();
            // The submission is only valid if we already have a connection to the job's JobManager.
            final JobManagerConnection jobManagerConnection = jobManagerTable.get (jobId);
            if (jobManagerConnection == null) {
                final String message = "Could not submit task because there is no JobManager " + "associated for the job " + jobId + '.';
                log.debug (message);
                throw new TaskSubmissionException (message);
            }
            // Fencing check: reject submissions from a JobMaster whose leader id does not match.
            if (!Objects.equals (jobManagerConnection.getJobMasterId (), jobMasterId)) {
                final String message = "Rejecting the task submission because the job manager leader id " + jobMasterId + " does not match the expected job manager leader id " + jobManagerConnection.getJobMasterId () + '.';
                log.debug (message);
                throw new TaskSubmissionException (message);
            }
            // The slot referenced by the allocation id must exist and be activatable for this job.
            if (!taskSlotTable.tryMarkSlotActive (jobId, tdd.getAllocationId ())) {
                final String message = "No task slot allocated for job ID " + jobId + " and allocation ID " + tdd.getAllocationId () + '.';
                log.debug (message);
                throw new TaskSubmissionException (message);
            }
            // re-integrate offloaded data: large payloads may have been shipped via the blob store.
            try {
                tdd.loadBigData (blobCacheService.getPermanentBlobService ());
            } catch (IOException | ClassNotFoundException e) {
                throw new TaskSubmissionException ("Could not re-integrate offloaded TaskDeploymentDescriptor data.", e);
            }
            // deserialize the pre-serialized information
            final JobInformation jobInformation;
            final TaskInformation taskInformation;
            try {
                jobInformation = tdd.getSerializedJobInformation ().deserializeValue (getClass ().getClassLoader ());
                taskInformation = tdd.getSerializedTaskInformation ().deserializeValue (getClass ().getClassLoader ());
            } catch (IOException | ClassNotFoundException e) {
                throw new TaskSubmissionException ("Could not deserialize the job or task information.", e);
            }
            // Sanity check: descriptor and deserialized job information must agree on the job id.
            if (!jobId.equals (jobInformation.getJobId ())) {
                throw new TaskSubmissionException ("Inconsistent job ID information inside TaskDeploymentDescriptor (" + tdd.getJobId () + " vs. " + jobInformation.getJobId () + ")");
            }
            // Metric group scoped to this specific task attempt.
            TaskMetricGroup taskMetricGroup = taskManagerMetricGroup.addTaskForJob (jobInformation.getJobId (), jobInformation.getJobName (), taskInformation.getJobVertexId (), tdd.getExecutionAttemptId (), taskInformation.getTaskName (), tdd.getSubtaskIndex (), tdd.getAttemptNumber ());
            // Input splits are requested lazily from the JobManager over RPC.
            InputSplitProvider inputSplitProvider = new RpcInputSplitProvider (jobManagerConnection.getJobManagerGateway (), taskInformation.getJobVertexId (), tdd.getExecutionAttemptId (), taskManagerConfiguration.getTimeout ());
            // Per-connection services the task needs to talk back to the JobManager.
            TaskManagerActions taskManagerActions = jobManagerConnection.getTaskManagerActions ();
            CheckpointResponder checkpointResponder = jobManagerConnection.getCheckpointResponder ();
            GlobalAggregateManager aggregateManager = jobManagerConnection.getGlobalAggregateManager ();
            LibraryCacheManager libraryCache = jobManagerConnection.getLibraryCacheManager ();
            ResultPartitionConsumableNotifier resultPartitionConsumableNotifier = jobManagerConnection.getResultPartitionConsumableNotifier ();
            PartitionProducerStateChecker partitionStateChecker = jobManagerConnection.getPartitionStateChecker ();
            // Local state store for this subtask, used for checkpoint recovery.
            final TaskLocalStateStore localStateStore = localStateStoresManager.localStateStoreForSubtask (jobId, tdd.getAllocationId (), taskInformation.getJobVertexId (), tdd.getSubtaskIndex ());
            final JobManagerTaskRestore taskRestore = tdd.getTaskRestore ();
            final TaskStateManager taskStateManager = new TaskStateManagerImpl (jobId, tdd.getExecutionAttemptId (), localStateStore, taskRestore, checkpointResponder);
            // Assemble the runnable Task from the descriptor plus all TaskExecutor services.
            Task task = new Task (jobInformation, taskInformation, tdd.getExecutionAttemptId (), tdd.getAllocationId (), tdd.getSubtaskIndex (), tdd.getAttemptNumber (), tdd.getProducedPartitions (), tdd.getInputGates (), tdd.getTargetSlotNumber (), taskExecutorServices.getMemoryManager (), taskExecutorServices.getIOManager (), taskExecutorServices.getNetworkEnvironment (), taskExecutorServices.getKvStateService (), taskExecutorServices.getBroadcastVariableManager (), taskExecutorServices.getTaskEventDispatcher (), taskStateManager, taskManagerActions, inputSplitProvider, checkpointResponder, aggregateManager, blobCacheService, libraryCache, fileCache, taskManagerConfiguration, taskMetricGroup, resultPartitionConsumableNotifier, partitionStateChecker, getRpcService ().getExecutor ());
            log.info ("Received task {}.", task.getTaskInfo ().getTaskNameWithSubtasks ());
            boolean taskAdded;
            try {
                taskAdded = taskSlotTable.addTask (task);
            } catch (SlotNotFoundException | SlotNotActiveException e) {
                throw new TaskSubmissionException ("Could not submit task.", e);
            }
            if (taskAdded) {
                // Task successfully registered in the slot: start its executing thread and ack.
                task.startTaskThread ();
                return CompletableFuture.completedFuture (Acknowledge.get ());
            } else {
                // Duplicate execution attempt id: a task with this id is already running here.
                final String message = "TaskManager already contains a task for id " + task.getExecutionId () + '.';
                log.debug (message);
                throw new TaskSubmissionException (message);
            }
        } catch (TaskSubmissionException e) {
            // All rejection paths funnel here and are reported via the returned future.
            return FutureUtils.completedExceptionally (e);
        }
    }
}