signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class PasswordCipher { /** * Convert the key to a byte array by compressing the 16 - bit characters to
* bytes . This XOR compression insures that the top 8 bits are still
* significant , so a key of " \ u0020 " yields a different cipher than a key of
* " \ u0120 " . */
private static byte [ ] convertKeyToByteArray ( String key ) { } } | byte [ ] result = new byte [ key . length ( ) ] ; for ( int i = 0 ; i < result . length ; i ++ ) { char thisChar = key . charAt ( i ) ; result [ i ] = ( byte ) ( thisChar >>> 8 & 0xFF ^ thisChar & 0xFF ) ; } return result ; |
public class MergePath { /** * Adds the classpath for the loader as paths in the MergePath .
* @ param loader class loader whose classpath should be used to search . */
public void addLocalClassPath ( ClassLoader loader ) { } } | String classpath = null ; if ( loader instanceof DynamicClassLoader ) classpath = ( ( DynamicClassLoader ) loader ) . getLocalClassPath ( ) ; else classpath = System . getProperty ( "java.class.path" ) ; addClassPath ( classpath ) ; |
public class SwingUtil { /** * Adjusts the widths and heights of the cells of the supplied table to fit their contents . */
public static void sizeToContents ( JTable table ) { } } | TableModel model = table . getModel ( ) ; TableColumn column = null ; Component comp = null ; int ccount = table . getColumnModel ( ) . getColumnCount ( ) , rcount = model . getRowCount ( ) , cellHeight = 0 ; for ( int cc = 0 ; cc < ccount ; cc ++ ) { int headerWidth = 0 , cellWidth = 0 ; column = table . getColumnModel ( ) . getColumn ( cc ) ; try { comp = column . getHeaderRenderer ( ) . getTableCellRendererComponent ( null , column . getHeaderValue ( ) , false , false , 0 , 0 ) ; headerWidth = comp . getPreferredSize ( ) . width ; } catch ( NullPointerException e ) { // getHeaderRenderer ( ) this doesn ' t work in 1.3
} for ( int rr = 0 ; rr < rcount ; rr ++ ) { Object cellValue = model . getValueAt ( rr , cc ) ; comp = table . getDefaultRenderer ( model . getColumnClass ( cc ) ) . getTableCellRendererComponent ( table , cellValue , false , false , 0 , cc ) ; Dimension psize = comp . getPreferredSize ( ) ; cellWidth = Math . max ( psize . width , cellWidth ) ; cellHeight = Math . max ( psize . height , cellHeight ) ; } column . setPreferredWidth ( Math . max ( headerWidth , cellWidth ) ) ; } if ( cellHeight > 0 ) { table . setRowHeight ( cellHeight ) ; } |
public class CodedInputStream { /** * Sets { @ code currentLimit } to ( current position ) + { @ code byteLimit } . This
* is called when descending into a length - delimited embedded message .
* < p > Note that { @ code pushLimit ( ) } does NOT affect how many bytes the
* { @ code CodedInputStream } reads from an underlying { @ code InputStream } when
* refreshing its buffer . If you need to prevent reading past a certain
* point in the underlying { @ code InputStream } ( e . g . because you expect it to
* contain more data after the end of the message which you need to handle
* differently ) then you must place a wrapper around your { @ code InputStream }
* which limits the amount of data that can be read from it .
* @ return the old limit . */
public int pushLimit ( int byteLimit ) throws InvalidProtocolBufferException { } } | if ( byteLimit < 0 ) { throw InvalidProtocolBufferException . negativeSize ( ) ; } byteLimit += totalBytesRetired + bufferPos ; final int oldLimit = currentLimit ; if ( byteLimit > oldLimit ) { throw InvalidProtocolBufferException . truncatedMessage ( ) ; } currentLimit = byteLimit ; recomputeBufferSizeAfterLimit ( ) ; return oldLimit ; |
public class NelderMead { /** * Attempts to find the minimal value of the given function .
* @ param eps the desired accuracy of the result .
* @ param iterationLimit the maximum number of iteration steps to allow . This value must be positive
* @ param f the function to optimize . This value can not be null
* @ param initalPoints the list of initial guess points . If too small , new ones will be generated . if too large ,
* the extra ones will be ignored . This list may not be empty
* @ param parallel { @ code true } if multiple threads should be used for
* optimization , or { @ code false } if a single thread should be used .
* @ return the computed value for the optimization . */
public Vec optimize ( double eps , int iterationLimit , Function f , List < Vec > initalPoints , boolean parallel ) { } } | if ( initalPoints . isEmpty ( ) ) throw new ArithmeticException ( "Empty Initial list. Can not determin dimension of problem" ) ; Vec init = initalPoints . get ( 0 ) ; int N = initalPoints . get ( 0 ) . length ( ) ; // The simplex verticies paired with their value from the objective function
List < ProbailityMatch < Vec > > simplex = new ArrayList < > ( N ) ; for ( Vec vars : initalPoints ) simplex . add ( new ProbailityMatch < > ( f . f ( vars , parallel ) , vars . clone ( ) ) ) ; Random rand = new Random ( initalPoints . hashCode ( ) ) ; while ( simplex . size ( ) < N + 1 ) { // Better simplex geneartion ?
DenseVector newSimplex = new DenseVector ( N ) ; for ( int i = 0 ; i < newSimplex . length ( ) ; i ++ ) if ( init . get ( i ) != 0 ) newSimplex . set ( i , init . get ( i ) * rand . nextGaussian ( ) ) ; else newSimplex . set ( i , rand . nextGaussian ( ) ) ; simplex . add ( new ProbailityMatch < > ( f . f ( newSimplex , parallel ) , newSimplex ) ) ; } Collections . sort ( simplex ) ; // Remove superfolusly given points
while ( simplex . size ( ) > N + 1 ) simplex . remove ( simplex . size ( ) - 1 ) ; // Center of gravity point
Vec x0 = new DenseVector ( N ) ; // reflection point
Vec xr = new DenseVector ( N ) ; // Extension point , also used for contraction
Vec xec = new DenseVector ( N ) ; // Temp space for compuations
Vec tmp = new DenseVector ( N ) ; final int lastIndex = simplex . size ( ) - 1 ; for ( int iterationCount = 0 ; iterationCount < iterationLimit ; iterationCount ++ ) { // Convergence check
if ( Math . abs ( simplex . get ( lastIndex ) . getProbability ( ) - simplex . get ( 0 ) . getProbability ( ) ) < eps ) break ; // Step 2 : valculate x0
x0 . zeroOut ( ) ; for ( ProbailityMatch < Vec > pm : simplex ) x0 . mutableAdd ( pm . getMatch ( ) ) ; x0 . mutableDivide ( simplex . size ( ) ) ; // Step 3 : Reflection
x0 . copyTo ( xr ) ; x0 . copyTo ( tmp ) ; tmp . mutableSubtract ( simplex . get ( lastIndex ) . getMatch ( ) ) ; xr . mutableAdd ( reflection , tmp ) ; double fxr = f . f ( xr ) ; if ( simplex . get ( 0 ) . getProbability ( ) <= fxr && fxr < simplex . get ( lastIndex - 1 ) . getProbability ( ) ) { insertIntoSimplex ( simplex , xr , fxr ) ; continue ; } // Step 4 : Expansion
if ( fxr < simplex . get ( 0 ) . getProbability ( ) ) // Best so far
{ x0 . copyTo ( xec ) ; xec . mutableAdd ( expansion , tmp ) ; // tmp still contains ( x0 - xWorst )
double fxec = f . f ( xec ) ; if ( fxec < fxr ) insertIntoSimplex ( simplex , xec , fxec ) ; // Even better ! Use this one
else insertIntoSimplex ( simplex , xr , fxr ) ; // Ehh , wasnt as good as we thought
continue ; } // Step 5 : Contraction
x0 . copyTo ( xec ) ; xec . mutableAdd ( contraction , tmp ) ; double fxec = f . f ( xec ) ; if ( fxec < simplex . get ( lastIndex ) . getProbability ( ) ) { insertIntoSimplex ( simplex , xec , fxec ) ; continue ; } // Step 6 : Reduction
Vec xBest = simplex . get ( 0 ) . getMatch ( ) ; for ( int i = 1 ; i < simplex . size ( ) ; i ++ ) { ProbailityMatch < Vec > pm = simplex . get ( i ) ; Vec xi = pm . getMatch ( ) ; xi . mutableSubtract ( xBest ) ; xi . mutableMultiply ( shrink ) ; xi . mutableAdd ( xBest ) ; pm . setProbability ( f . f ( xi ) ) ; } Collections . sort ( simplex ) ; } return simplex . get ( 0 ) . getMatch ( ) ; |
public class Injector { /** * Utility to grab a random element from an array of Strings . */
private static String randomElement ( ArrayList < String > list ) { } } | int index = random . nextInt ( list . size ( ) ) ; return list . get ( index ) ; |
public class CmsRoleManager { /** * Returns all users of organizational units for which the current user has
* the { @ link CmsRole # ACCOUNT _ MANAGER } role . < p >
* @ param cms the current cms context
* @ param ouFqn the fully qualified name of the organizational unit
* @ param includeSubOus if sub organizational units should be included in the search
* @ param includeWebusers if webuser organizational units should be included in the search
* @ return a list of { @ link org . opencms . file . CmsUser } objects
* @ throws CmsException if something goes wrong */
public List < CmsUser > getManageableUsers ( CmsObject cms , String ouFqn , boolean includeSubOus , boolean includeWebusers ) throws CmsException { } } | List < CmsOrganizationalUnit > ous = getManageableOrgUnits ( cms , ouFqn , includeSubOus , includeWebusers ) ; List < CmsUser > users = new ArrayList < CmsUser > ( ) ; Iterator < CmsOrganizationalUnit > it = ous . iterator ( ) ; while ( it . hasNext ( ) ) { CmsOrganizationalUnit orgUnit = it . next ( ) ; users . addAll ( OpenCms . getOrgUnitManager ( ) . getUsers ( cms , orgUnit . getName ( ) , false ) ) ; } return users ; |
public class CmsVfsSitemapService { /** * Returns if the given type id matches the xml - redirect resource type . < p >
* @ param typeId the resource type id
* @ return < code > true < / code > if the given type id matches the xml - redirect resource type */
@ SuppressWarnings ( "deprecation" ) private boolean isRedirectType ( int typeId ) { } } | try { return typeId == OpenCms . getResourceManager ( ) . getResourceType ( RECOURCE_TYPE_NAME_REDIRECT ) . getTypeId ( ) ; } catch ( Exception e ) { return false ; } |
public class ModelAnalysis { /** * 统计信息 , 计算删除非0特征后 , 权重的长度 */
public void removeZero ( ) { } } | boolean freeze = false ; if ( feature . isStopIncrement ( ) ) { feature . setStopIncrement ( false ) ; freeze = true ; } TIntObjectHashMap < String > index = ( TIntObjectHashMap < String > ) feature . toInverseIndexMap ( ) ; System . out . println ( "原字典大小" + index . size ( ) ) ; System . out . println ( "原字典大小" + feature . size ( ) ) ; StringFeatureAlphabet newfeat = new StringFeatureAlphabet ( ) ; cl . factory . setDefaultFeatureAlphabet ( newfeat ) ; for ( int i = 0 ; i < weights . length ; i ++ ) { TIntFloatIterator itt = weights [ i ] . data . iterator ( ) ; HashSparseVector ww = new HashSparseVector ( ) ; while ( itt . hasNext ( ) ) { itt . advance ( ) ; float v = itt . value ( ) ; if ( Math . abs ( v ) < 1e-3f ) continue ; String fea = index . get ( itt . key ( ) ) ; int newidx = newfeat . lookupIndex ( fea ) ; ww . put ( newidx , v ) ; } weights [ i ] = ww ; } newfeat . setStopIncrement ( freeze ) ; System . out . println ( "新字典大小" + newfeat . size ( ) ) ; System . out . println ( "新字典大小" + feature . size ( ) ) ; index . clear ( ) ; |
public class RawResponse { /** * Set response read charset .
* If not set , would get charset from response headers . If not found , would use UTF - 8. */
public RawResponse charset ( Charset charset ) { } } | return new RawResponse ( method , url , statusCode , statusLine , cookies , headers , body , charset , decompress ) ; |
public class BeanDescriptor { /** * This method returns an array containing all methods exposed in this
* class and any superclass . In the future , Java is not pleased to have
* access to private or protected methods .
* @ param beanClass the class
* @ return an array containing all the public methods in this class */
private Method [ ] getAllMethods ( Class < ? > beanClass ) { } } | Map < String , Method > uniqueMethods = new HashMap < > ( ) ; Class < ? > currentClass = beanClass ; while ( currentClass != null && currentClass != Object . class ) { addUniqueMethods ( uniqueMethods , currentClass . getDeclaredMethods ( ) ) ; // we also need to look for interface methods -
// because the class may be abstract
Class < ? > [ ] interfaces = currentClass . getInterfaces ( ) ; for ( Class < ? > anInterface : interfaces ) { addUniqueMethods ( uniqueMethods , anInterface . getMethods ( ) ) ; } currentClass = currentClass . getSuperclass ( ) ; } Collection < Method > methods = uniqueMethods . values ( ) ; return methods . toArray ( new Method [ 0 ] ) ; |
public class RuleBasedCollator { /** * Get the version of this collator object .
* @ return the version object associated with this collator */
@ Override public VersionInfo getVersion ( ) { } } | int version = tailoring . version ; int rtVersion = VersionInfo . UCOL_RUNTIME_VERSION . getMajor ( ) ; return VersionInfo . getInstance ( ( version >>> 24 ) + ( rtVersion << 4 ) + ( rtVersion >> 4 ) , ( ( version >> 16 ) & 0xff ) , ( ( version >> 8 ) & 0xff ) , ( version & 0xff ) ) ; |
public class MeasureUnitUtil { /** * Convert the given value expressed in the given unit to seconds .
* @ param value is the value to convert
* @ param inputUnit is the unit of the { @ code value }
* @ return the result of the convertion . */
@ Pure public static double toSeconds ( double value , TimeUnit inputUnit ) { } } | switch ( inputUnit ) { case DAYS : return value * 86400. ; case HOURS : return value * 3600. ; case MINUTES : return value * 60. ; case SECONDS : break ; case MILLISECONDS : return milli2unit ( value ) ; case MICROSECONDS : return micro2unit ( value ) ; case NANOSECONDS : return nano2unit ( value ) ; default : throw new IllegalArgumentException ( ) ; } return value ; |
public class DefaultGroovyMethods { /** * Support the subscript operator with a collection for a byte array
* @ param array a byte array
* @ param indices a collection of indices for the items to retrieve
* @ return list of the bytes at the given indices
* @ since 1.0 */
@ SuppressWarnings ( "unchecked" ) public static List < Byte > getAt ( byte [ ] array , Collection indices ) { } } | return primitiveArrayGet ( array , indices ) ; |
public class BundleReader { /** * Read an object . */
void readObject ( ) { } } | for ( int i = 0 ; i < sers . length ; ++ i ) { try { data [ i ] = sers [ i ] . fromByteBuffer ( buffer ) ; } catch ( UnsupportedOperationException e ) { throw new AbortException ( "Deserialization failed." , e ) ; } catch ( IOException e ) { throw new AbortException ( "IO error" , e ) ; } } |
public class SimpleDataModel { /** * Adds an item to this model . Does not force the refresh of any display . */
public void addItem ( int index , T item ) { } } | if ( _items == null ) { return ; } _items . add ( index , item ) ; |
public class SegmentsUtil { /** * get float from segments .
* @ param segments target segments .
* @ param offset value offset . */
public static float getFloat ( MemorySegment [ ] segments , int offset ) { } } | if ( inFirstSegment ( segments , offset , 4 ) ) { return segments [ 0 ] . getFloat ( offset ) ; } else { return getFloatMultiSegments ( segments , offset ) ; } |
public class ListRateBasedRulesResult { /** * An array of < a > RuleSummary < / a > objects .
* @ param rules
* An array of < a > RuleSummary < / a > objects . */
public void setRules ( java . util . Collection < RuleSummary > rules ) { } } | if ( rules == null ) { this . rules = null ; return ; } this . rules = new java . util . ArrayList < RuleSummary > ( rules ) ; |
public class TimecodeBuilder { /** * @ param framenumber
* @ param framerate
* should be 29.97 , 59.94 , or 23.976 , otherwise the calculations will be off .
* @ return a frame number that lets us use non - dropframe computations to extract time components */
static long compensateForDropFrame ( final long framenumber , final double framerate ) { } } | // Code by David Heidelberger , adapted from Andrew Duncan
// Number of frames to drop on the minute marks is the nearest integer to 6 % of the framerate
final long dropFrames = Math . round ( framerate * .066666 ) ; final long framesPer10Minutes = Math . round ( framerate * 60 * 10 ) ; final long framesPerMinute = ( Math . round ( framerate ) * 60 ) - dropFrames ; final long d = framenumber / framesPer10Minutes ; final long m = framenumber % framesPer10Minutes ; // In the original post , the next line read m > 1 , which only worked for 29.97 . Jean - Baptiste Mardelle correctly pointed out that m should be compared to dropFrames .
if ( m > dropFrames ) return framenumber + ( dropFrames * 9 * d ) + dropFrames * ( ( m - dropFrames ) / framesPerMinute ) ; else return framenumber + dropFrames * 9 * d ; |
public class FileListTemporalDocumentIterator { /** * Returns the next document from the list . */
public TemporalDocument next ( ) { } } | // String fileName = filesToProcess . poll ( ) ;
NameAndTime n = filesToProcess . poll ( ) ; if ( n == null ) return null ; try { return ( n . hasTimeStamp ( ) ) ? new TemporalFileDocument ( n . fileName , n . timeStamp ) : new TemporalFileDocument ( n . fileName ) ; // no timestamp
} catch ( IOException ioe ) { return null ; } |
public class Highlights { /** * Indicate if the field of the given instance matches with the given
* highlight
* @ param instance The instance
* @ param field The field
* @ param highlight The highlight
* @ return true if match */
protected boolean match ( Object instance , Field field , Highlight highlight ) { } } | try { Object o = getPm ( ) . get ( instance , field . getProperty ( ) ) ; if ( o != null && o . toString ( ) . equals ( highlight . getValue ( ) ) && highlight . getField ( ) . equals ( field . getId ( ) ) ) { return true ; } } catch ( Exception e ) { } return false ; |
public class Step { /** * Checks if HTML text contains expected value .
* @ param pageElement
* Is target element
* @ param textOrKey
* Is the new data ( text or text in context ( after a save ) )
* @ throws TechnicalException
* is thrown if you have a technical error ( format , configuration , datas , . . . ) in NoraUi .
* Exception with { @ value com . github . noraui . utils . Messages # FAIL _ MESSAGE _ WRONG _ EXPECTED _ VALUE } message ( with screenshot , with exception ) or with
* { @ value com . github . noraui . utils . Messages # FAIL _ MESSAGE _ UNABLE _ TO _ FIND _ ELEMENT } message
* ( with screenshot , with exception )
* @ throws FailureException
* if the scenario encounters a functional error */
protected void checkText ( PageElement pageElement , String textOrKey ) throws TechnicalException , FailureException { } } | WebElement webElement = null ; String value = getTextOrKey ( textOrKey ) ; try { webElement = Context . waitUntil ( ExpectedConditions . presenceOfElementLocated ( Utilities . getLocator ( pageElement ) ) ) ; } catch ( final Exception e ) { new Result . Failure < > ( e . getMessage ( ) , Messages . getMessage ( Messages . FAIL_MESSAGE_UNABLE_TO_FIND_ELEMENT ) , true , pageElement . getPage ( ) . getCallBack ( ) ) ; } final String innerText = webElement == null ? null : webElement . getText ( ) ; logger . info ( "checkText() expected [{}] and found [{}]." , textOrKey . startsWith ( cryptoService . getPrefix ( ) ) ? SECURE_MASK : value , innerText ) ; if ( ! value . equals ( innerText ) ) { new Result . Failure < > ( innerText , Messages . format ( Messages . getMessage ( Messages . FAIL_MESSAGE_WRONG_EXPECTED_VALUE ) , pageElement , textOrKey . startsWith ( cryptoService . getPrefix ( ) ) ? SECURE_MASK : value , pageElement . getPage ( ) . getApplication ( ) ) , true , pageElement . getPage ( ) . getCallBack ( ) ) ; } |
public class LexTokenReader { /** * Read the next character from the stream . The position details are updated , accounting for newlines and tab stops .
* The next character is set in the " ch " field , as well as being returned for convenience .
* @ return the next character . */
private char rdCh ( ) { } } | char c = super . readCh ( ) ; if ( c == '\n' ) { linecount ++ ; charpos = 0 ; } else if ( c == '\t' ) { charpos += Properties . parser_tabstop - charpos % Properties . parser_tabstop ; } else if ( c != ( char ) - 1 ) { charpos ++ ; } ch = c ; charsread ++ ; offset = getCurrentRawReadOffset ( ) ; // if ( ch = = ' \ r ' )
// ch = rdCh ( ) ;
// } else
// ch = c ;
return ch ; |
public class PrepareRequestInterceptor { /** * Setup accept encoding header from configuration
* @ param requestHeaders */
private void setupAcceptEncoding ( Map < String , String > requestHeaders ) { } } | // validates whether to add headers for accept - encoding for compression
String acceptCompressionFormat = Config . getProperty ( Config . COMPRESSION_RESPONSE_FORMAT ) ; if ( StringUtils . hasText ( acceptCompressionFormat ) ) { requestHeaders . put ( RequestElements . HEADER_PARAM_ACCEPT_ENCODING , acceptCompressionFormat ) ; } |
public class DomHelpers { /** * Marshal a { @ link Document } to { @ link InputStream }
* @ param is the InputStream containing XML data
* @ return { @ link Document } containg the XML data
* @ throws SAXException
* @ throws IOException */
public static Document toDocument ( InputStream is ) throws MarshalException { } } | try { return newDocumentBuilder ( ) . parse ( is ) ; } catch ( SAXException e ) { IfmapJLog . error ( e . getMessage ( ) ) ; throw new MarshalException ( e . getMessage ( ) ) ; } catch ( IOException e ) { IfmapJLog . error ( e . getMessage ( ) ) ; throw new MarshalException ( e . getMessage ( ) ) ; } |
public class DOMHelper { /** * Test whether the given node is a namespace decl node . In DOM Level 2
* this can be done in a namespace - aware manner , but in Level 1 DOMs
* it has to be done by testing the node name .
* @ param n Node to be examined .
* @ return boolean - - true iff the node is an Attr whose name is
* " xmlns " or has the " xmlns : " prefix . */
public boolean isNamespaceNode ( Node n ) { } } | if ( Node . ATTRIBUTE_NODE == n . getNodeType ( ) ) { String attrName = n . getNodeName ( ) ; return ( attrName . startsWith ( "xmlns:" ) || attrName . equals ( "xmlns" ) ) ; } return false ; |
public class dnszone { /** * Use this API to unset the properties of dnszone resource .
* Properties that need to be unset are specified in args array . */
public static base_response unset ( nitro_service client , dnszone resource , String [ ] args ) throws Exception { } } | dnszone unsetresource = new dnszone ( ) ; unsetresource . zonename = resource . zonename ; return unsetresource . unset_resource ( client , args ) ; |
public class AbstractJobLauncher { /** * Combines the specified { @ link JobListener } with the { @ link # mandatoryJobListeners } for this job . Uses
* { @ link JobListeners # parallelJobListener ( List ) } to create a { @ link CloseableJobListener } that will execute all
* the { @ link JobListener } s in parallel . */
private CloseableJobListener getParallelCombinedJobListener ( JobState jobState , JobListener jobListener ) { } } | List < JobListener > jobListeners = Lists . newArrayList ( this . mandatoryJobListeners ) ; jobListeners . add ( jobListener ) ; Set < String > jobListenerClassNames = jobState . getPropAsSet ( ConfigurationKeys . JOB_LISTENERS_KEY , StringUtils . EMPTY ) ; for ( String jobListenerClassName : jobListenerClassNames ) { try { @ SuppressWarnings ( "unchecked" ) Class < ? extends JobListener > jobListenerClass = ( Class < ? extends JobListener > ) Class . forName ( jobListenerClassName ) ; jobListeners . add ( jobListenerClass . newInstance ( ) ) ; } catch ( ClassNotFoundException | InstantiationException | IllegalAccessException e ) { LOG . warn ( String . format ( "JobListener could not be created due to %s" , jobListenerClassName ) , e ) ; } } return JobListeners . parallelJobListener ( jobListeners ) ; |
public class KMLWriterDriver { /** * Specifies a custom KML schema that is used to add custom data to KML
* Features . The " id " attribute is required and must be unique within the
* KML file .
* < Schema > is always a child of < Document > .
* Syntax :
* < Schema name = " string " id = " ID " >
* < SimpleField type = " string " name = " string " >
* < displayName > . . . < / displayName > < ! - - string - - >
* < / SimpleField >
* < / Schema >
* @ param xmlOut
* @ param tableName */
private void writeSchema ( XMLStreamWriter xmlOut , ResultSetMetaData metaData ) throws XMLStreamException , SQLException { } } | columnCount = metaData . getColumnCount ( ) ; // The schema is writing only if there is more than one column
if ( columnCount > 1 ) { xmlOut . writeStartElement ( "Schema" ) ; xmlOut . writeAttribute ( "name" , tableName ) ; xmlOut . writeAttribute ( "id" , tableName ) ; // Write column metadata
kmlFields = new HashMap < Integer , String > ( ) ; for ( int fieldId = 1 ; fieldId <= metaData . getColumnCount ( ) ; fieldId ++ ) { final String fieldTypeName = metaData . getColumnTypeName ( fieldId ) ; if ( ! fieldTypeName . equalsIgnoreCase ( "geometry" ) ) { String fieldName = metaData . getColumnName ( fieldId ) ; writeSimpleField ( xmlOut , fieldName , getKMLType ( metaData . getColumnType ( fieldId ) , fieldTypeName ) ) ; kmlFields . put ( fieldId , fieldName ) ; } } xmlOut . writeEndElement ( ) ; // Write schema
} |
public class ApiOvhVps { /** * Reinstall the virtual server
* REST : POST / vps / { serviceName } / reinstall
* @ param doNotSendPassword [ required ] If asked , the installation password will NOT be sent ( only if sshKey defined )
* @ param softwareId [ required ] Id of the vps . Software type fetched in / template / { id } / software
* @ param language [ required ] Distribution language . default : en
* @ param templateId [ required ] Id of the vps . Template fetched in / templates list
* @ param sshKey [ required ] SSH key names to pre - install on your VPS ( name from / me / sshKey )
* @ param serviceName [ required ] The internal name of your VPS offer */
public OvhTask serviceName_reinstall_POST ( String serviceName , Boolean doNotSendPassword , String language , Long [ ] softwareId , String [ ] sshKey , Long templateId ) throws IOException { } } | String qPath = "/vps/{serviceName}/reinstall" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "doNotSendPassword" , doNotSendPassword ) ; addBody ( o , "language" , language ) ; addBody ( o , "softwareId" , softwareId ) ; addBody ( o , "sshKey" , sshKey ) ; addBody ( o , "templateId" , templateId ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTask . class ) ; |
public class AnimaQuery { /** * Set the column name using lambda , at the same time setting the value , the SQL generated is " column = ? "
* @ param function lambda expressions , use the Model : : getXXX
* @ param value column value
* @ param < S >
* @ param < R >
* @ return AnimaQuery */
public < S extends Model , R > AnimaQuery < T > where ( TypeFunction < S , R > function , Object value ) { } } | String columnName = AnimaUtils . getLambdaColumnName ( function ) ; conditionSQL . append ( " AND " ) . append ( columnName ) . append ( " = ?" ) ; paramValues . add ( value ) ; return this ; |
public class FormLoginAuthenticator { /** * This method handle formlogin ; If the SSO cookie exist , then it will use the cookie to authenticate . If
* the cookie does not exist , then it will re - redirect to the login page .
* @ param req
* @ param res
* @ param enableRedirect
* @ return */
private AuthenticationResult handleFormLogin ( HttpServletRequest req , HttpServletResponse res , WebRequest webRequest ) { } } | AuthenticationResult authResult = null ; authResult = ssoAuthenticator . authenticate ( webRequest ) ; if ( authResult != null ) { authResult . setAuditCredType ( AuditEvent . CRED_TYPE_FORM ) ; } if ( authResult != null && authResult . getStatus ( ) != AuthResult . FAILURE ) { postParameterHelper . restore ( req , res ) ; return authResult ; } try { authResult = providerAuthenticatorProxy . authenticate ( req , res , null ) ; } catch ( Exception e ) { return new AuthenticationResult ( AuthResult . FAILURE , e . getLocalizedMessage ( ) ) ; } if ( authResult . getStatus ( ) == AuthResult . CONTINUE ) { authResult = null ; if ( webRequest . isFormLoginRedirectEnabled ( ) ) { authResult = handleRedirect ( req , res , webRequest ) ; if ( authResult != null ) { authResult . setAuditCredType ( AuditEvent . CRED_TYPE_FORM ) ; authResult . setAuditOutcome ( AuditEvent . OUTCOME_REDIRECT ) ; } } } return authResult ; |
public class HBaseTableSchema { /** * Returns the HBase identifiers of all registered column qualifiers for a specific column family .
* @ param family The name of the column family for which the column qualifier identifiers are returned .
* @ return The HBase identifiers of all registered column qualifiers for a specific column family . */
byte [ ] [ ] getQualifierKeys ( String family ) { } } | Map < String , TypeInformation < ? > > qualifierMap = familyMap . get ( family ) ; if ( qualifierMap == null ) { throw new IllegalArgumentException ( "Family " + family + " does not exist in schema." ) ; } Charset c = Charset . forName ( charset ) ; byte [ ] [ ] qualifierKeys = new byte [ qualifierMap . size ( ) ] [ ] ; int i = 0 ; for ( String name : qualifierMap . keySet ( ) ) { qualifierKeys [ i ++ ] = name . getBytes ( c ) ; } return qualifierKeys ; |
public class srecLexer { /** * $ ANTLR start " COMMENT " */
public final void mCOMMENT ( ) throws RecognitionException { } } | try { int _type = COMMENT ; int _channel = DEFAULT_TOKEN_CHANNEL ; // / home / victor / srec / core / src / main / antlr / srec . g : 155:2 : ( ' # ' ( . ) * NEWLINE )
// / home / victor / srec / core / src / main / antlr / srec . g : 155:4 : ' # ' ( . ) * NEWLINE
{ match ( '#' ) ; // / home / victor / srec / core / src / main / antlr / srec . g : 155:8 : ( . ) *
loop11 : do { int alt11 = 2 ; int LA11_0 = input . LA ( 1 ) ; if ( ( LA11_0 == '\r' ) ) { alt11 = 2 ; } else if ( ( LA11_0 == '\n' ) ) { alt11 = 2 ; } else if ( ( ( LA11_0 >= '\u0000' && LA11_0 <= '\t' ) || ( LA11_0 >= '\u000B' && LA11_0 <= '\f' ) || ( LA11_0 >= '\u000E' && LA11_0 <= '\uFFFF' ) ) ) { alt11 = 1 ; } switch ( alt11 ) { case 1 : // / home / victor / srec / core / src / main / antlr / srec . g : 155:8 : .
{ matchAny ( ) ; } break ; default : break loop11 ; } } while ( true ) ; skip ( ) ; mNEWLINE ( ) ; } state . type = _type ; state . channel = _channel ; } finally { } |
public class Searcher { /** * Adds a boolean refinement for the next queries .
* @ param attribute the attribute to refine on .
* @ param value the value to refine with .
* @ return this { @ link Searcher } for chaining . */
@ SuppressWarnings ( { } } | "WeakerAccess" , "unused" } ) // For library users
public Searcher addBooleanFilter ( String attribute , Boolean value ) { booleanFilterMap . put ( attribute , value ) ; rebuildQueryFacetFilters ( ) ; return this ; |
public class JDBCSQLXML { /** * Retrieves this object ' s SQLXML value as a gzipped array of bytes ,
* possibly by terminating any in - progress write operations and converting
* accumulated intermediate data .
* @ throws java . sql . SQLException if an underlying I / O or transform
* error occurs
* @ return this object ' s SQLXML value */
protected byte [ ] getGZipData ( ) throws SQLException { } } | byte [ ] bytes = gZipData ( ) ; if ( bytes != null ) { return bytes ; } if ( ( this . outputStream == null ) || ! this . outputStream . isClosed ( ) || this . outputStream . isFreed ( ) ) { throw Exceptions . notReadable ( ) ; } try { setGZipData ( this . outputStream . toByteArray ( ) ) ; return gZipData ( ) ; } catch ( IOException ex ) { throw Exceptions . notReadable ( ) ; } finally { this . freeOutputStream ( ) ; } |
public class Period { /** * Returns a copy of this period with the years and months normalized .
* This normalizes the years and months units , leaving the days unit unchanged .
* The months unit is adjusted to have an absolute value less than 11,
* with the years unit being adjusted to compensate . For example , a period of
* " 1 Year and 15 months " will be normalized to " 2 years and 3 months " .
* The sign of the years and months units will be the same after normalization .
* For example , a period of " 1 year and - 25 months " will be normalized to
* " - 1 year and - 1 month " .
* This instance is immutable and unaffected by this method call .
* @ return a { @ code Period } based on this period with excess months normalized to years , not null
* @ throws ArithmeticException if numeric overflow occurs */
public Period normalized ( ) { } } | long totalMonths = toTotalMonths ( ) ; long splitYears = totalMonths / 12 ; int splitMonths = ( int ) ( totalMonths % 12 ) ; // no overflow
if ( splitYears == years && splitMonths == months ) { return this ; } return create ( Math . toIntExact ( splitYears ) , splitMonths , days ) ; |
public class StringUtil { /** * Takes a string token to be used as a key or qualifier and cleanses out reserved tokens . This
* operation is not symetrical . Logic is to replace all spaces and exclamation points with
* underscores .
* @ param token token to cleanse .
* @ return */
public static String cleanseToken ( String token ) { } } | if ( token == null || token . length ( ) == 0 ) { return token ; } ; String cleansed = token . replaceAll ( SPACE , UNDERSCORE ) ; cleansed = cleansed . replaceAll ( Constants . SEP , UNDERSCORE ) ; return cleansed ; |
public class DataContextUtils { /** * Return the resolved value from the context
* @ param data data context
* @ param group group name
* @ param key key name
* @ param defaultValue default if the value is not resolvable
* @ return resolved value or default */
public static String resolve ( final Map < String , Map < String , String > > data , final String group , final String key , final String defaultValue ) { } } | return null != data && null != data . get ( group ) && null != data . get ( group ) . get ( key ) ? data . get ( group ) . get ( key ) : defaultValue ; |
public class RulePhaseFinder { /** * Returns the phases loaded in this finder , sorted by Class . getSimpleName ( ) . */
public List < Class < ? extends RulePhase > > getAvailablePhases ( ) { } } | ArrayList < Class < ? extends RulePhase > > phases = new ArrayList < > ( this . cachedPhases . values ( ) ) ; // It could be sorted by the real order .
phases . sort ( new Comparator ( ) { @ Override public int compare ( Object phaseClass1 , Object phaseClass2 ) { if ( phaseClass1 == null || ! ( phaseClass1 instanceof Class ) ) return - 1 ; if ( phaseClass2 == null || ! ( phaseClass2 instanceof Class ) ) return 1 ; String name1 = ( ( Class < ? extends RulePhase > ) phaseClass1 ) . getSimpleName ( ) ; String name2 = ( ( Class < ? extends RulePhase > ) phaseClass2 ) . getSimpleName ( ) ; return name1 . compareToIgnoreCase ( name2 ) ; } } ) ; return phases ; |
public class CmsXmlUtils { /** * Helper to unmarshal ( read ) xml contents from a String into a document . < p >
* Using this method ensures that the OpenCms XML entitiy resolver is used . < p >
* @ param xmlData the xml data in a String
* @ param resolver the XML entity resolver to use
* @ return the base object initialized with the unmarshalled XML document
* @ throws CmsXmlException if something goes wrong
* @ see CmsXmlUtils # unmarshalHelper ( InputSource , EntityResolver ) */
public static Document unmarshalHelper ( String xmlData , EntityResolver resolver ) throws CmsXmlException { } } | return CmsXmlUtils . unmarshalHelper ( new InputSource ( new StringReader ( xmlData ) ) , resolver ) ; |
public class ProxyServlet { /** * Process an HTML get or post .
* @ exception ServletException From inherited class .
* @ exception IOException From inherited class . */
public void service ( HttpServletRequest req , HttpServletResponse res ) throws ServletException , IOException { } } | if ( ( proxyURLPrefix == null ) || ( proxyURLPrefix . length ( ) == 0 ) ) { // No proxy specified
super . service ( req , res ) ; return ; } ServletOutputStream streamOut = res . getOutputStream ( ) ; try { String proxyURLString = getProxyURLString ( req ) ; HttpRequestBase httpRequest = getHttpRequest ( req , proxyURLString ) ; addHeaders ( req , httpRequest ) ; this . getDataFromClient ( httpRequest , streamOut ) ; } catch ( Exception e ) { displayErrorInHtml ( streamOut , e ) ; } |
public class IncludeAnyOfTheseEventsFilterExt { /** * The InTrace UI supports a nice format for specifying multiple classes
* in a single string of text - - convenient for copy - n - paste .
* If you want com . ibm . MyClass and com . hp . YourClass , then delimit the multiple classes with a bar ( | ) , like this :
* com . ibm . MyClass | com . hp . YourClass
* in the " classes " instrumentation dialog ( http : / / mchr3k . github . io / org . intrace / ui . html ) .
* This syntax also works when requesting classes to be instrumented via code and the server agent .
* @ return */
public String getDelimitedListOfAllClasses ( ) { } } | int count = 0 ; StringBuilder sb = new StringBuilder ( ) ; for ( ITraceEvent event : m_criteriaList ) { if ( count ++ > 0 ) sb . append ( CLASS_DELIMITER ) ; sb . append ( event . getPackageAndClass ( ) ) ; } return sb . toString ( ) ; |
public class ConnectionImpl { /** * Checks the authority of a consumer to consume from a destination */
@ Deprecated private void checkBrowseAuthority ( DestinationHandler destination , String destinationName , SecurityContext secContext , boolean system ) throws SINotAuthorizedException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "checkBrowseAuthority" , new Object [ ] { destination , destinationName , secContext , Boolean . valueOf ( system ) } ) ; // Check authority to browse a destination
if ( ! destination . isTemporary ( ) && ! system ) { boolean allowed = true ; boolean failingOpisIdentityAdopter = false ; // Perform the alternate user check first . If an alternateUser was set then we
// need to determine whether the connected subject has the authority to perform
// alternate user checks .
if ( secContext . isAlternateUserBased ( ) ) { if ( ! destination . checkDestinationAccess ( secContext , OperationType . IDENTITY_ADOPTER ) ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "checkBrowseAuthority" , "not authorized to perform alternate user checks on this destination" ) ; allowed = false ; failingOpisIdentityAdopter = true ; } } if ( allowed ) // ok so far
{ // Check if its the default exc dest
if ( destinationName . startsWith ( SIMPConstants . SYSTEM_DEFAULT_EXCEPTION_DESTINATION_PREFIX ) ) { // If its a def exc dest see if we have access to the prefix .
if ( ! _messageProcessor . getAccessChecker ( ) . checkDestinationAccess ( secContext , null , // home bus
SIMPConstants . SYSTEM_DEFAULT_EXCEPTION_DESTINATION_PREFIX , OperationType . RECEIVE ) ) { allowed = false ; } } else { if ( ! destination . checkDestinationAccess ( secContext , OperationType . BROWSE ) ) { allowed = false ; } } } if ( ! allowed ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "checkBrowseAuthority" , "not authorized to browse this destination" ) ; // Get the username
String userName = secContext . getUserName ( failingOpisIdentityAdopter ) ; OperationType operationType = failingOpisIdentityAdopter ? OperationType . IDENTITY_ADOPTER : OperationType . BROWSE ; // Build the message for the Exception and the Notification
String nlsMessage = nls . getFormattedMessage ( "USER_NOT_AUTH_BROWSE_ERROR_CWSIP0304" , new Object [ ] { destination . getName ( ) , userName } , null ) ; // Fire a Notification if Eventing is enabled
_accessChecker . fireDestinationAccessNotAuthorizedEvent ( destination . getName ( ) , userName , operationType , nlsMessage ) ; // Thrown if user denied access to destination
throw new SINotAuthorizedException ( nlsMessage ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "checkBrowseAuthority" ) ; |
public class RuntimeExceptionDeclared { /** * overrides the visitor to find declared runtime exceptions
* @ param obj
* the method object of the currently parsed method */
@ Override public void visitMethod ( final Method obj ) { } } | if ( obj . isSynthetic ( ) ) { return ; } ExceptionTable et = obj . getExceptionTable ( ) ; if ( et != null ) { String [ ] exNames = et . getExceptionNames ( ) ; Set < String > methodRTExceptions = new HashSet < > ( 6 ) ; int priority = LOW_PRIORITY ; boolean foundRuntime = false ; for ( String ex : exNames ) { boolean isRuntime = false ; if ( runtimeExceptions . contains ( ex ) ) { isRuntime = true ; } else { try { JavaClass exClass = Repository . lookupClass ( ex ) ; if ( exClass . instanceOf ( runtimeExceptionClass ) ) { runtimeExceptions . add ( ex ) ; if ( ex . startsWith ( "java.lang." ) ) { priority = NORMAL_PRIORITY ; } isRuntime = true ; } } catch ( ClassNotFoundException cnfe ) { bugReporter . reportMissingClass ( cnfe ) ; } } if ( isRuntime ) { foundRuntime = true ; methodRTExceptions . add ( ex ) ; } } if ( foundRuntime ) { BugInstance bug = new BugInstance ( this , BugType . DRE_DECLARED_RUNTIME_EXCEPTION . name ( ) , priority ) . addClass ( this ) . addMethod ( this ) ; for ( String ex : methodRTExceptions ) { bug . add ( new StringAnnotation ( ex ) ) ; } bugReporter . reportBug ( bug ) ; } } |
public class TaggerScanner { /** * 通过识别jar的形式将其下的所有class添加到classSet集合中 .
* @ param classLoader 类加载器
* @ param url URL实例
* @ param packageName 包名
* @ param packageDirName 包路径名 */
private void addClassByJar ( ClassLoader classLoader , URL url , String packageName , String packageDirName ) { } } | // 从url中获取jar , 然后从此jar包得到一个枚举类 , 然后进行迭代 .
JarFile jar ; try { jar = ( ( JarURLConnection ) url . openConnection ( ) ) . getJarFile ( ) ; Enumeration < JarEntry > entries = jar . entries ( ) ; while ( entries . hasMoreElements ( ) ) { // 获取jar里的一个实体 可以是目录 和一些jar包里的其他文件 如META - INF等文件
JarEntry entry = entries . nextElement ( ) ; String name = entry . getName ( ) ; // 如果是以 / 开头的 , 则获取后面的字符串
if ( name . charAt ( 0 ) == '/' ) { name = name . substring ( 1 ) ; } // 如果前半部分和定义的包名相同 .
if ( name . startsWith ( packageDirName ) ) { int index = name . lastIndexOf ( '/' ) ; // 如果以 " / " 结尾 , 则是一个包 , 获取包名并把 " / " 替换成 " . "
if ( index != - 1 ) { packageName = name . substring ( 0 , index ) . replace ( '/' , '.' ) ; } // 如果可以迭代下去 并且是一个包 , 如果是一个 . class文件 而且不是目录
if ( index != - 1 && name . endsWith ( ".class" ) && ! entry . isDirectory ( ) ) { // 去掉后面的 " . class " 获取真正的类名
String className = name . substring ( packageName . length ( ) + 1 , name . length ( ) - 6 ) ; this . addClassByName ( classLoader , packageName + '.' + className ) ; } } } } catch ( IOException expected ) { // 此处不打印堆栈信息 .
log . warn ( "从jar文件中读取class出错." ) ; } |
public class Environment { /** * Returns the value set for the environment variable identified by the given name . If the environment variable
* is not set , then { @ code defaultValue } is returned .
* @ param environmentVariableName { @ link String } name of the environment variable .
* @ param defaultValue the default value to return if the specified environment variable is not set .
* @ return the value set the environment variable identified by the given name or { @ code defaultValue }
* if the named environment variable is not set .
* @ see # environmentVariables ( ) */
public String get ( String environmentVariableName , String defaultValue ) { } } | return environmentVariables ( ) . get ( environmentVariableName , defaultValue ) ; |
public class SDMath { /** * Cosine distance reduction operation . The output contains the cosine distance for each
* tensor / subset along the specified dimensions : < br >
* out = 1.0 - cosineSimilarity ( x , y ) < br >
* See { @ link # cosineSimilarity ( String , SDVariable , SDVariable , int . . . ) }
* @ param name Name of the output variable
* @ param x Input variable x
* @ param y Input variable y
* @ param dimensions Dimensions to calculate cosine similarity over
* @ return Output variable */
public SDVariable cosineDistance ( String name , SDVariable x , SDVariable y , int ... dimensions ) { } } | validateNumerical ( "cosine distance" , x , y ) ; SDVariable result = f ( ) . cosineDistance ( x , y , dimensions ) ; return updateVariableNameAndReference ( result , name ) ; |
public class SyncCollection { /** * Returns the limit for the number of results in this request .
* @ return The limit or 0 if there is no limit . */
public int getNumberOfResultsLimit ( ) { } } | if ( mLimit == null ) { return 0 ; } Integer limit = ( Integer ) mLimit . get ( WebDavSearch . NRESULTS ) ; return limit == null ? 0 : limit ; |
public class DynJS { public Object execute ( String source ) { } } | return newRunner ( ) . withContext ( this . defaultExecutionContext ) . withSource ( source ) . execute ( ) ; |
public class KiekerMeasureUtil { /** * Will be called to register the time when the method has started . */
public void measureBefore ( ) { } } | if ( ! CTRLINST . isMonitoringEnabled ( ) ) { return ; } hostname = VMNAME ; sessionId = SESSIONREGISTRY . recallThreadLocalSessionId ( ) ; traceId = CFREGISTRY . recallThreadLocalTraceId ( ) ; // entry point
if ( traceId == - 1 ) { entrypoint = true ; traceId = CFREGISTRY . getAndStoreUniqueThreadLocalTraceId ( ) ; CFREGISTRY . storeThreadLocalEOI ( 0 ) ; CFREGISTRY . storeThreadLocalESS ( 1 ) ; // next operation is ess + 1
eoi = 0 ; ess = 0 ; } else { entrypoint = false ; eoi = CFREGISTRY . incrementAndRecallThreadLocalEOI ( ) ; // ess > 1
ess = CFREGISTRY . recallAndIncrementThreadLocalESS ( ) ; // ess > = 0
if ( ( eoi == - 1 ) || ( ess == - 1 ) ) { LOG . error ( "eoi and/or ess have invalid values:" + " eoi == " + eoi + " ess == " + ess ) ; CTRLINST . terminateMonitoring ( ) ; } } tin = TIME . getTime ( ) ; |
public class Predicate { /** * Create an Iterable view of the specified Iterable that only contains
* elements that match the predicate .
* This Iterable can be iterated over at any time in the future to
* view the current predicate - matching elements of the input Iterable . */
public < E extends T > Iterable < E > createView ( final Iterable < E > input ) { } } | return new Iterable < E > ( ) { public Iterator < E > iterator ( ) { return filter ( input . iterator ( ) ) ; } } ; |
public class EntryBuffer { /** * Looks up an entry in the buffer .
* @ param index The entry index .
* @ param < T > The entry type .
* @ return The entry or { @ code null } if the entry is not present in the index . */
@ SuppressWarnings ( "unchecked" ) public < T extends Entry > T get ( long index ) { } } | Entry entry = buffer [ offset ( index ) ] ; return entry != null && entry . getIndex ( ) == index ? ( T ) entry . acquire ( ) : null ; |
public class AsynchronousRequest { /** * For more info on Character SAB API go < a href = " https : / / wiki . guildwars2 . com / wiki / API : 2 / characters # Super _ Adventure _ Box _ . 28sab . 29 " > here < / a > < br / >
* Give user the access to { @ link Callback # onResponse ( Call , Response ) } and { @ link Callback # onFailure ( Call , Throwable ) } methods for custom interactions
* @ param API API key
* @ param name character name
* @ param callback callback that is going to be used for { @ link Call # enqueue ( Callback ) }
* @ throws GuildWars2Exception invalid API key | empty character name
* @ throws NullPointerException if given { @ link Callback } is empty
* @ see CharacterSAB character SAB info */
public void getCharacterSAB ( String API , String name , Callback < CharacterSAB > callback ) throws GuildWars2Exception , NullPointerException { } } | isParamValid ( new ParamChecker ( ParamType . API , API ) , new ParamChecker ( ParamType . CHAR , name ) ) ; gw2API . getCharacterSAB ( name , API ) . enqueue ( callback ) ; |
public class Events { /** * Returns the collection of events provided by the given service interface .
* @ param serviceInterface the client service interface
* @ return the events provided by the given service interface */
public static Map < Method , EventType > getMethodMap ( Class < ? > serviceInterface ) { } } | if ( ! serviceInterface . isInterface ( ) ) { Map < Method , EventType > events = new HashMap < > ( ) ; for ( Class < ? > iface : serviceInterface . getInterfaces ( ) ) { events . putAll ( findMethods ( iface ) ) ; } return events ; } return findMethods ( serviceInterface ) ; |
public class ResourceUtils { /** * Collects and combines the selection which may contain sources from
* different projects and / or multiple sources from same project .
* If selection contains hierarchical data ( like file and it ' s parent
* directory ) , the only topmost element is returned ( same for directories
* from projects ) .
* The children from selected parents are not resolved , so that the return
* value contains the ' highest ' possible hierarchical elements without
* children .
* @ param structuredSelection
* @ return a map with the project as a key and selected resources as value .
* If project itself was selected , then key is the same as value . */
public static Map < IProject , List < WorkItem > > getResourcesPerProject ( IStructuredSelection structuredSelection ) { } } | Map < IProject , List < WorkItem > > projectsMap = new HashMap < > ( ) ; for ( Iterator < ? > iter = structuredSelection . iterator ( ) ; iter . hasNext ( ) ; ) { Object element = iter . next ( ) ; WorkItem workItem = getWorkItem ( element ) ; if ( workItem == null ) { IWorkingSet wset = Util . getAdapter ( IWorkingSet . class , element ) ; if ( wset != null ) { mapResources ( wset , projectsMap ) ; continue ; } // Support for active changesets
ChangeSet set = Util . getAdapter ( ChangeSet . class , element ) ; for ( WorkItem change : getResources ( set ) ) { mapResource ( change , projectsMap , true ) ; } continue ; } mapResource ( workItem , projectsMap , false ) ; } return projectsMap ; |
public class TaskSkipped { /** * Serialize information about the skipped execution .
* @ param out The stream to which this object is serialized .
* @ throws IOException */
@ Trivial private void writeObject ( ObjectOutputStream out ) throws IOException { } } | PutField fields = out . putFields ( ) ; fields . put ( FAILURE , failure ) ; fields . put ( PREVIOUS_RESULT , previousResult ) ; out . writeFields ( ) ; |
public class OpDef { /** * < pre >
* Description of the output ( s ) .
* < / pre >
* < code > repeated . tensorflow . OpDef . ArgDef output _ arg = 3 ; < / code > */
public java . util . List < org . tensorflow . framework . OpDef . ArgDef > getOutputArgList ( ) { } } | return outputArg_ ; |
public class Seed { /** * Translates any exception that occurred in the application using an extensible exception mechanism .
* @ param exception the exception to handle .
* @ return the translated exception . */
public static BaseException translateException ( Exception exception ) { } } | if ( exception instanceof BaseException ) { return ( BaseException ) exception ; } else { for ( SeedExceptionTranslator exceptionTranslator : exceptionTranslators ) { if ( exceptionTranslator . canTranslate ( exception ) ) { return exceptionTranslator . translate ( exception ) ; } } return SeedException . wrap ( exception , CoreErrorCode . UNEXPECTED_EXCEPTION ) ; } |
public class ResourceFinder { /** * Assumes the class specified points to a file in the classpath that contains
* the name of a class that implements or is a subclass of the specfied class .
* Any class that cannot be loaded or are not assignable to the specified class will be
* skipped and placed in the ' resourcesNotLoaded ' collection .
* Example classpath :
* META - INF / java . io . InputStream # contains the classname org . acme . AcmeInputStream
* META - INF / java . io . InputStream # contains the classname org . widget . NeatoInputStream
* META - INF / java . io . InputStream # contains the classname com . foo . BarInputStream
* ResourceFinder finder = new ResourceFinder ( " META - INF / " ) ;
* List classes = finder . findAllImplementations ( java . io . InputStream . class ) ;
* classes . contains ( " org . acme . AcmeInputStream " ) ; / / true
* classes . contains ( " org . widget . NeatoInputStream " ) ; / / true
* classes . contains ( " com . foo . BarInputStream " ) ; / / true
* @ param interfase a superclass or interface
* @ return
* @ throws IOException if classLoader . getResources throws an exception */
public List < Class > findAvailableImplementations ( Class interfase ) throws IOException { } } | resourcesNotLoaded . clear ( ) ; List < Class > implementations = new ArrayList < > ( ) ; List < String > strings = findAvailableStrings ( interfase . getName ( ) ) ; for ( String className : strings ) { try { Class impl = classLoader . loadClass ( className ) ; if ( interfase . isAssignableFrom ( impl ) ) { implementations . add ( impl ) ; } else { resourcesNotLoaded . add ( className ) ; } } catch ( Exception notAvailable ) { resourcesNotLoaded . add ( className ) ; } } return implementations ; |
public class AT_Row { /** * Sets the right padding for all cells in the row .
* @ param paddingRight new padding , ignored if smaller than 0
* @ return this to allow chaining */
public AT_Row setPaddingRight ( int paddingRight ) { } } | if ( this . hasCells ( ) ) { for ( AT_Cell cell : this . getCells ( ) ) { cell . getContext ( ) . setPaddingRight ( paddingRight ) ; } } return this ; |
public class IssueServiceUtils { /** * Collects the group ( s ) an issue belongs to according to its own list of types
* and a grouping specification .
* @ param issueTypes Issue types
* @ param groupingSpecification Group - & gt ; ( Group types )
* @ return List of group the issue belongs to */
public static Set < String > getIssueGroups ( Collection < String > issueTypes , Map < String , Set < String > > groupingSpecification ) { } } | Set < String > groups = new HashSet < > ( ) ; for ( String issueType : issueTypes ) { for ( Map . Entry < String , Set < String > > entry : groupingSpecification . entrySet ( ) ) { String groupName = entry . getKey ( ) ; Set < String > groupTypes = entry . getValue ( ) ; if ( groupTypes . contains ( issueType ) ) { groups . add ( groupName ) ; } } } return groups ; |
public class UISelectMany { /** * < p > Return the number of occurrances of a particular element in the
* array . < / p >
* @ param element object whose occurrance is to be counted in the array .
* @ param array object representing the old value of this component . */
private static int countElementOccurrence ( Object element , Object [ ] array ) { } } | int count = 0 ; for ( int i = 0 ; i < array . length ; ++ i ) { Object arrayElement = array [ i ] ; if ( arrayElement != null && element != null ) { if ( arrayElement . equals ( element ) ) { count ++ ; } } } return count ; |
public class PasswordFilter { /** * / * ( non - Javadoc )
* @ see java . util . logging . Logger # log ( java . util . logging . Level , java . lang . String , java . lang . Object ) */
@ Override public void log ( Level level , String msg , Object param1 ) { } } | super . log ( level , maskPassword ( msg ) , param1 ) ; |
public class BigIntStringChecksum { /** * Return the original BigInteger .
* ( or throw an exception if something went wrong ) .
* @ return BigInteger or throw exception
* @ throws SecretShareException if the hex is invalid */
public BigInteger asBigInteger ( ) { } } | try { return new BigInteger ( asHex , HEX_RADIX ) ; } catch ( NumberFormatException e ) { throw new SecretShareException ( "Invalid input='" + asHex + "'" , e ) ; } |
public class TemplateParser { /** * Find the corresponding TypeMirror from Elemental2 for a given DOM Element
* @ param element The element we want the TypeMirror of
* @ return The type mirror */
private TypeMirror getTypeFromDOMElement ( Element element ) { } } | return DOMElementsUtil . getTypeForElementTag ( element . getStartTag ( ) . getName ( ) ) . map ( Class :: getCanonicalName ) . map ( elements :: getTypeElement ) . map ( TypeElement :: asType ) . orElse ( null ) ; |
public class Application { /** * Get application context .
* @ return application context . */
public static Context getApplicationContext ( ) { } } | com . ardikars . jxnet . context . Context context = com . ardikars . jxnet . context . Application . getApplicationContext ( ) ; return new ApplicationContext ( context ) ; |
public class TermStatementUpdate { /** * Helper to format term updates as expected by the Wikibase API
* @ param updates
* planned updates for the type of term
* @ return map ready to be serialized as JSON by Jackson */
protected Map < String , TermImpl > getMonolingualUpdatedValues ( Map < String , NameWithUpdate > updates ) { } } | Map < String , TermImpl > updatedValues = new HashMap < > ( ) ; for ( NameWithUpdate update : updates . values ( ) ) { if ( ! update . write ) { continue ; } updatedValues . put ( update . value . getLanguageCode ( ) , monolingualToJackson ( update . value ) ) ; } return updatedValues ; |
public class RedisClient { /** * Get a { @ link Mono } that resolves { @ link RedisURI } to a { @ link SocketAddress } . Resolution is performed either using Redis
* Sentinel ( if the { @ link RedisURI } is configured with Sentinels ) or via DNS resolution .
* Subclasses of { @ link RedisClient } may override that method .
* @ param redisURI must not be { @ literal null } .
* @ return the resolved { @ link SocketAddress } .
* @ see ClientResources # dnsResolver ( )
* @ see RedisURI # getSentinels ( )
* @ see RedisURI # getSentinelMasterId ( ) */
protected Mono < SocketAddress > getSocketAddress ( RedisURI redisURI ) { } } | return Mono . defer ( ( ) -> { if ( redisURI . getSentinelMasterId ( ) != null && ! redisURI . getSentinels ( ) . isEmpty ( ) ) { logger . debug ( "Connecting to Redis using Sentinels {}, MasterId {}" , redisURI . getSentinels ( ) , redisURI . getSentinelMasterId ( ) ) ; return lookupRedis ( redisURI ) . switchIfEmpty ( Mono . error ( new RedisConnectionException ( "Cannot provide redisAddress using sentinel for masterId " + redisURI . getSentinelMasterId ( ) ) ) ) ; } else { return Mono . fromCallable ( ( ) -> clientResources . socketAddressResolver ( ) . resolve ( ( redisURI ) ) ) ; } } ) ; |
public class RoundHelper { /** * Source : http : / / www . luschny . de / java / doubleformat . html
* @ param dValue
* the value to be formatted
* @ param nScale
* The precision of the decimal scale . If type is
* { @ link EDecimalType # FIX } the decimal scale , else the ( carrying scale
* - 1 ) . Should be & ge ; 0.
* @ param eType
* The formatting type . May not be < code > null < / code > .
* @ param aLocale
* The locale to be used for the decimal symbols . May not be
* < code > null < / code > .
* @ return the string representation of the double value . For NaN and infinite
* values , the return of { @ link Double # toString ( ) } is returned . */
@ Nonnull public static String getFormatted ( final double dValue , @ Nonnegative final int nScale , @ Nonnull final EDecimalType eType , @ Nonnull final Locale aLocale ) { } } | ValueEnforcer . isGE0 ( nScale , "Scale" ) ; ValueEnforcer . notNull ( eType , "Type" ) ; ValueEnforcer . notNull ( aLocale , "Locale" ) ; if ( Double . isNaN ( dValue ) || Double . isInfinite ( dValue ) ) return Double . toString ( dValue ) ; // Avoid negative scales
final DecimalFormat aDF = ( DecimalFormat ) NumberFormat . getInstance ( aLocale ) ; aDF . setDecimalFormatSymbols ( DecimalFormatSymbols . getInstance ( aLocale ) ) ; aDF . setMaximumFractionDigits ( nScale ) ; aDF . setMinimumFractionDigits ( nScale ) ; if ( eType . isExponential ( ) ) { String sPattern = "0E0" ; if ( nScale > 0 ) sPattern += '.' + StringHelper . getRepeated ( '0' , nScale ) ; aDF . applyPattern ( sPattern ) ; } else { aDF . setGroupingUsed ( false ) ; aDF . setMinimumIntegerDigits ( 1 ) ; } return aDF . format ( dValue ) ; |
public class SegmentFelzenszwalbHuttenlocher04 { /** * Searches for root nodes in the graph and adds their size to the list of region sizes . Makes sure all
* other nodes in the graph point directly at their root . */
protected void computeOutput ( ) { } } | outputRegionId . reset ( ) ; outputRegionSizes . reset ( ) ; for ( int y = 0 ; y < graph . height ; y ++ ) { int indexGraph = graph . startIndex + y * graph . stride ; for ( int x = 0 ; x < graph . width ; x ++ , indexGraph ++ ) { int parent = graph . data [ indexGraph ] ; if ( parent == indexGraph ) { outputRegionId . add ( indexGraph ) ; outputRegionSizes . add ( regionSize . get ( indexGraph ) ) ; } else { // find the parent and set the child to it
int child = indexGraph ; while ( parent != child ) { child = parent ; parent = graph . data [ child ] ; } graph . data [ indexGraph ] = parent ; } } } |
public class Matrix4f { /** * Set this matrix to a spherical billboard transformation that rotates the local + Z axis of a given object with position < code > objPos < / code > towards
* a target position at < code > targetPos < / code > using a shortest arc rotation by not preserving any < i > up < / i > vector of the object .
* This method can be used to create the complete model transformation for a given object , including the translation of the object to
* its position < code > objPos < / code > .
* In order to specify an < i > up < / i > vector which needs to be maintained when rotating the + Z axis of the object ,
* use { @ link # billboardSpherical ( Vector3fc , Vector3fc , Vector3fc ) } .
* @ see # billboardSpherical ( Vector3fc , Vector3fc , Vector3fc )
* @ param objPos
* the position of the object to rotate towards < code > targetPos < / code >
* @ param targetPos
* the position of the target ( for example the camera ) towards which to rotate the object
* @ return this */
public Matrix4f billboardSpherical ( Vector3fc objPos , Vector3fc targetPos ) { } } | float toDirX = targetPos . x ( ) - objPos . x ( ) ; float toDirY = targetPos . y ( ) - objPos . y ( ) ; float toDirZ = targetPos . z ( ) - objPos . z ( ) ; float x = - toDirY ; float y = toDirX ; float w = ( float ) Math . sqrt ( toDirX * toDirX + toDirY * toDirY + toDirZ * toDirZ ) + toDirZ ; float invNorm = ( float ) ( 1.0 / Math . sqrt ( x * x + y * y + w * w ) ) ; x *= invNorm ; y *= invNorm ; w *= invNorm ; float q00 = ( x + x ) * x ; float q11 = ( y + y ) * y ; float q01 = ( x + x ) * y ; float q03 = ( x + x ) * w ; float q13 = ( y + y ) * w ; this . _m00 ( 1.0f - q11 ) ; this . _m01 ( q01 ) ; this . _m02 ( - q13 ) ; this . _m03 ( 0.0f ) ; this . _m10 ( q01 ) ; this . _m11 ( 1.0f - q00 ) ; this . _m12 ( q03 ) ; this . _m13 ( 0.0f ) ; this . _m20 ( q13 ) ; this . _m21 ( - q03 ) ; this . _m22 ( 1.0f - q11 - q00 ) ; this . _m23 ( 0.0f ) ; this . _m30 ( objPos . x ( ) ) ; this . _m31 ( objPos . y ( ) ) ; this . _m32 ( objPos . z ( ) ) ; this . _m33 ( 1.0f ) ; _properties ( PROPERTY_AFFINE | PROPERTY_ORTHONORMAL ) ; return this ; |
public class VoiceApi { /** * Initiate a conference
* Initiate a two - step conference to the specified destination . This places the existing call on hold and creates a new call in the dialing state ( step 1 ) . After initiating the conference you can use [ / voice / calls / { id } / complete - conference ] ( / reference / workspace / Voice / index . html # completeConference ) to complete the conference and bring all parties into the same call ( step 2 ) .
* @ param id The connection ID of the call to start the conference from . This call will be placed on hold . ( required )
* @ param initiateConferenceData ( required )
* @ return ApiSuccessResponse
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiSuccessResponse initiateConference ( String id , InitiateConferenceData initiateConferenceData ) throws ApiException { } } | ApiResponse < ApiSuccessResponse > resp = initiateConferenceWithHttpInfo ( id , initiateConferenceData ) ; return resp . getData ( ) ; |
public class RNAUtils { /** * method to get all nucleotides for one polymer
* @ param polymer
* PolymerNotation
* @ return List of nucleotides of the polmyer
* @ throws RNAUtilsException
* if the polymer is not rna or dna or the nucleotide can not be
* read
* @ throws HELM2HandledException
* if the polymer contains HELM2 features
* @ throws ChemistryException if chemistry engine can not be initialized */
public static List < Nucleotide > getNucleotideList ( PolymerNotation polymer ) throws RNAUtilsException , HELM2HandledException , ChemistryException { } } | checkRNA ( polymer ) ; List < Nucleotide > nucleotides = new ArrayList < Nucleotide > ( ) ; /* check for HELM2Elements */
List < MonomerNotation > monomerNotations = polymer . getPolymerElements ( ) . getListOfElements ( ) ; for ( int i = 0 ; i < monomerNotations . size ( ) ; i ++ ) { MonomerNotation monomerNotation = monomerNotations . get ( i ) ; if ( ( ! ( monomerNotation instanceof MonomerNotationUnitRNA ) ) || Integer . parseInt ( monomerNotation . getCount ( ) ) != 1 ) { LOG . info ( "MonomerNotation contains HELM2 Elements " + monomerNotation ) ; throw new HELM2HandledException ( "HELM2 Elements are involved" ) ; } try { boolean last = false ; if ( i == monomerNotations . size ( ) - 1 ) { last = true ; } nucleotides . add ( NucleotideParser . convertToNucleotide ( monomerNotation . getUnit ( ) , last ) ) ; } catch ( MonomerException | NucleotideLoadingException | NotationException | org . helm . notation2 . exception . NotationException e ) { e . printStackTrace ( ) ; throw new RNAUtilsException ( "Nucleotide can not be read " + e . getMessage ( ) ) ; } } return nucleotides ; |
public class TextAnalyzer {
    /**
     * Reloads the configuration data.
     *
     * @param keywordDefinitions keyword definitions.
     *        NOTE(review): this parameter is currently ignored by the implementation —
     *        confirm whether keyword reloading was intentionally dropped.
     * @param lengthDefinitions text-length category definitions; defensively copied into a
     *        sorted TreeMap, or set to null when null is passed.
     */
    void reloadDefinitions ( Map < String , ? extends Object > keywordDefinitions , Map < Integer , ? extends Object > lengthDefinitions ) {
        // Copy into a TreeMap so lookups can rely on key ordering; null clears the definitions.
        this . lengthDefinitions = lengthDefinitions == null ? null : new TreeMap < Integer , Object > ( lengthDefinitions ) ;
    }
}
public class Methods { /** * Gets an array of all methods in a class hierarchy walking up to parent classes */
public static Set < Method > getAllMethodsInHierarchy ( Method method ) { } } | LinkedHashSet < Method > allMethods = new LinkedHashSet < > ( ) ; val declaringClass = method . getDeclaringClass ( ) ; return getAllMethodsInHierarchy ( allMethods , declaringClass , method ) ; |
public class DynamicURLClassLoader {
    /**
     * Returns the permissions for the given codesource object.
     * The implementation of this method first calls super.getPermissions
     * and then adds permissions based on the URL of the codesource.
     * <p>If the protocol is "file" and the path specifies a file, then permission
     * to read that file is granted. If protocol is "file" and the path is a
     * directory, permission is granted to read all files and (recursively) all
     * files and subdirectories contained in that directory.
     * <p>If the protocol is not "file", then permission to connect to and accept
     * connections from the URL's host is granted.
     *
     * NOTE(review): this method relies on JDK-internal APIs
     * (sun.security.util.SecurityConstants, sun.net.www.ParseUtil) that are not
     * accessible on modern JDKs — confirm the supported JDK range.
     *
     * @param codesource the codesource
     * @return the permissions granted to the codesource
     */
    @ Override protected PermissionCollection getPermissions ( CodeSource codesource ) {
        final PermissionCollection perms = super . getPermissions ( codesource ) ;
        final URL url = codesource . getLocation ( ) ;
        Permission permission ;
        URLConnection urlConnection ;
        // Ask the URL connection for its own notion of the required permission first.
        try {
            urlConnection = url . openConnection ( ) ;
            permission = urlConnection . getPermission ( ) ;
        } catch ( IOException ioe ) {
            permission = null ;
            urlConnection = null ;
        }
        if ( ( permission != null ) && ( permission instanceof FilePermission ) ) {
            // if the permission has a separator char on the end,
            // it means the codebase is a directory, and we need
            // to add an additional permission to read recursively
            String path = permission . getName ( ) ;
            if ( path . endsWith ( File . separator ) ) {
                path += "-" ; // $ NON - NLS - 1 $
                permission = new FilePermission ( path , sun . security . util . SecurityConstants . FILE_READ_ACTION ) ;
            }
        } else if ( ( permission == null ) && ( URISchemeType . FILE . isURL ( url ) ) ) {
            // No permission from the connection but it is a file: URL — build a
            // FilePermission from the decoded path, recursive for directories.
            String path = url . getFile ( ) . replace ( '/' , File . separatorChar ) ;
            path = sun . net . www . ParseUtil . decode ( path ) ;
            if ( path . endsWith ( File . separator ) ) {
                path += "-" ; // $ NON - NLS - 1 $
            }
            permission = new FilePermission ( path , sun . security . util . SecurityConstants . FILE_READ_ACTION ) ;
        } else {
            // Non-file URL: grant connect/accept to the host (the JAR's host for jar: URLs).
            URL locUrl = url ;
            if ( urlConnection instanceof JarURLConnection ) {
                locUrl = ( ( JarURLConnection ) urlConnection ) . getJarFileURL ( ) ;
            }
            String host = locUrl . getHost ( ) ;
            if ( host == null ) {
                host = "localhost" ; // $ NON - NLS - 1 $
            }
            permission = new SocketPermission ( host , sun . security . util . SecurityConstants . SOCKET_CONNECT_ACCEPT_ACTION ) ;
        }
        // make sure the person that created this class loader
        // would have this permission
        final SecurityManager sm = System . getSecurityManager ( ) ;
        if ( sm != null ) {
            final Permission fp = permission ;
            // Check against the access-control context captured at loader-creation time.
            AccessController . doPrivileged ( new PrivilegedAction < Object > ( ) {
                @ Override public Object run ( ) throws SecurityException {
                    sm . checkPermission ( fp ) ;
                    return null ;
                }
            } , this . acc ) ;
        }
        perms . add ( permission ) ;
        return perms ;
    }
}
public class HBaseDataHandler { /** * ( non - Javadoc )
* @ see
* com . impetus . client . hbase . admin . DataHandler # readData ( java . lang . String ,
* java . lang . Class , com . impetus . kundera . metadata . model . EntityMetadata ,
* java . lang . String , java . util . List ) */
@ Override public List readAll ( final String tableName , Class clazz , EntityMetadata m , final List < Object > rowKey , List < String > relationNames , String ... columns ) throws IOException { } } | Table hTable = gethTable ( tableName ) ; List < HBaseDataWrapper > results = ( ( HBaseReader ) hbaseReader ) . loadAll ( hTable , rowKey , null , columns ) ; return onRead ( m , null , hTable , results ) ; |
public class StopWords { /** * 读取stopword
* @ param dicPath
* stopword所在地址 */
public void read ( String dicPath ) { } } | File path = new File ( dicPath ) ; if ( path . isDirectory ( ) ) { String [ ] subdir = path . list ( new FilenameFilter ( ) { @ Override public boolean accept ( File dir , String name ) { if ( name . toLowerCase ( ) . endsWith ( "txt" ) ) return true ; else return false ; } } ) ; for ( int i = 0 ; i < subdir . length ; i ++ ) { read ( path + "/" + subdir [ i ] ) ; } return ; } Long newTime = path . lastModified ( ) ; Long lastTime = lastModTime . get ( dicPath ) ; if ( lastTime == null || ! lastTime . equals ( newTime ) ) { // 路径是文件
try { InputStreamReader read = new InputStreamReader ( new FileInputStream ( path ) , "UTF-8" ) ; BufferedReader in = new BufferedReader ( read ) ; String s ; while ( ( s = in . readLine ( ) ) != null ) { s = s . trim ( ) ; if ( ! s . matches ( "^$" ) ) sWord . add ( s ) ; } in . close ( ) ; } catch ( Exception e ) { System . err . println ( "停用词文件路径错误" ) ; } } |
public class FieldPicker { /** * { @ inheritDoc } */
@ Override protected void setLinkProperty ( final UUID _linkTypeUUID , final long _toId , final UUID _toTypeUUID , final String _toName ) throws EFapsException { } } | if ( _linkTypeUUID . equals ( CIAdminUserInterface . LinkField2Command . uuid ) ) { this . commandID = _toId ; } super . setLinkProperty ( _linkTypeUUID , _toId , _toTypeUUID , _toName ) ; |
public class JsMessageVisitor {
    /**
     * Converts the given string from upper-underscore case to lower-camel case,
     * preserving numeric suffixes. For example: "NAME" -> "name", "A4_LETTER" ->
     * "a4Letter", "START_SPAN_1_23" -> "startSpan_1_23".
     */
    static String toLowerCamelCaseWithNumericSuffixes ( String input ) {
        // Determine where the numeric suffixes begin
        int suffixStart = input . length ( ) ;
        while ( suffixStart > 0 ) {
            char ch = '\0' ;
            int numberStart = suffixStart ;
            // Walk left over the run of digits immediately before the current suffix start.
            while ( numberStart > 0 ) {
                ch = input . charAt ( numberStart - 1 ) ;
                if ( Character . isDigit ( ch ) ) {
                    numberStart -- ;
                } else {
                    break ;
                }
            }
            // Accept the digit run as part of the suffix only if it is non-empty and
            // immediately preceded by an underscore; otherwise the scan stops here.
            if ( ( numberStart > 0 ) && ( numberStart < suffixStart ) && ( ch == '_' ) ) {
                suffixStart = numberStart - 1 ;
            } else {
                break ;
            }
        }
        if ( suffixStart == input . length ( ) ) {
            // No numeric suffix: convert the entire string.
            return CaseFormat . UPPER_UNDERSCORE . to ( CaseFormat . LOWER_CAMEL , input ) ;
        } else {
            // Convert only the prefix and re-append the untouched "_<digits>…" suffix.
            return CaseFormat . UPPER_UNDERSCORE . to ( CaseFormat . LOWER_CAMEL , input . substring ( 0 , suffixStart ) ) + input . substring ( suffixStart ) ;
        }
    }
}
public class FileStore { /** * Load a list of payload
* @ param < T > The payload type
* @ param clazz The class of the payload
* @ return The list of payloads loaded
* @ throws IOException */
public < T extends RoxPayload > List < T > load ( Class < T > clazz ) throws IOException { } } | List < T > payloads = new ArrayList < > ( ) ; for ( File f : getTmpDir ( clazz ) . listFiles ( ) ) { if ( f . isFile ( ) ) { InputStreamReader isr = new InputStreamReader ( new FileInputStream ( f ) , Charset . forName ( Constants . ENCODING ) . newDecoder ( ) ) ; payloads . add ( serializer . deserializePayload ( isr , clazz ) ) ; } } return payloads ; |
public class TextIntWritable { /** * Deserializes a { @ code TextIntWritable } from the provided stream and
* returns the resulting object . */
public static TextIntWritable read ( DataInput in ) throws IOException { } } | TextIntWritable tiw = new TextIntWritable ( ) ; tiw . t . readFields ( in ) ; tiw . position = in . readInt ( ) ; return tiw ; |
public class CBZip2InputStream {
    /**
     * Builds the Huffman decoding tables (limit/base/perm) for each of the
     * {@code nGroups} coding groups from the per-symbol code lengths.
     * Called by recvDecodingTables() exclusively.
     *
     * @param alphaSize size of the symbol alphabet
     * @param nGroups number of Huffman coding groups in the current block
     */
    private void createHuffmanDecodingTables ( final int alphaSize , final int nGroups ) {
        final Data dataShadow = this . data ;
        final char [ ] [ ] len = dataShadow . temp_charArray2d ;
        final int [ ] minLens = dataShadow . minLens ;
        final int [ ] [ ] limit = dataShadow . limit ;
        final int [ ] [ ] base = dataShadow . base ;
        final int [ ] [ ] perm = dataShadow . perm ;
        for ( int t = 0 ; t < nGroups ; t ++ ) {
            // Find the shortest and longest code length used by this group.
            int minLen = 32 ;
            int maxLen = 0 ;
            final char [ ] len_t = len [ t ] ;
            for ( int i = alphaSize ; -- i >= 0 ; ) {
                final char lent = len_t [ i ] ;
                if ( lent > maxLen ) {
                    maxLen = lent ;
                }
                if ( lent < minLen ) {
                    minLen = lent ;
                }
            }
            // Derive the canonical-Huffman decode tables for this group and remember
            // its minimum code length for the decoder.
            hbCreateDecodeTables ( limit [ t ] , base [ t ] , perm [ t ] , len [ t ] , minLen , maxLen , alphaSize ) ;
            minLens [ t ] = minLen ;
        }
    }
}
public class DnsTextEndpointGroup { /** * Creates a { @ link DnsTextEndpointGroup } that schedules queries on a random { @ link EventLoop } from
* { @ link CommonPools # workerGroup ( ) } .
* @ param hostname the hostname to query DNS queries for
* @ param mapping the { @ link Function } that maps the content of a { @ code TXT } record into
* an { @ link Endpoint } . The { @ link Function } is expected to return { @ code null }
* if the record contains unsupported content . */
public static DnsTextEndpointGroup of ( String hostname , Function < byte [ ] , Endpoint > mapping ) { } } | return new DnsTextEndpointGroupBuilder ( hostname , mapping ) . build ( ) ; |
public class DataGenerator { /** * Main function . It first parses the command line arguments . It then reads
* the directory structure from the input directory structure file and
* creates directory structure in the file system namespace . Afterwards it
* reads the file attributes and creates files in the file . All file content
* is filled with ' a ' .
* @ return */
public int run ( String [ ] args ) throws Exception { } } | int exitCode = 0 ; exitCode = init ( args ) ; if ( exitCode != 0 ) { return exitCode ; } // genDirStructure ( ) ;
genFiles ( ) ; return exitCode ; |
public class SalesforceExtractor {
    /**
     * Logs in to the Salesforce bulk API. Authenticates via the SOAP partner endpoint
     * (reusing an existing access token when the connector already has one, otherwise
     * falling back to username + password + security token), then derives the async REST
     * endpoint from the SOAP endpoint and creates the {@code BulkConnection}.
     *
     * @return login status ({@code true} once the bulk connection was created)
     * @throws Exception if authentication or connection setup fails
     */
    public boolean bulkApiLogin ( ) throws Exception {
        log . info ( "Authenticating salesforce bulk api" ) ;
        boolean success = false ;
        String hostName = this . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_HOST_NAME ) ;
        String apiVersion = this . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_VERSION ) ;
        if ( Strings . isNullOrEmpty ( apiVersion ) ) {
            // queryAll was introduced in version 39.0 , so need to use a higher version when using queryAll with the bulk api
            apiVersion = this . bulkApiUseQueryAll ? "42.0" : "29.0" ;
        }
        String soapAuthEndPoint = hostName + SALESFORCE_SOAP_SERVICE + "/" + apiVersion ;
        try {
            ConnectorConfig partnerConfig = new ConnectorConfig ( ) ;
            // Optional HTTP proxy for the SOAP login.
            if ( super . workUnitState . contains ( ConfigurationKeys . SOURCE_CONN_USE_PROXY_URL ) && ! super . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_USE_PROXY_URL ) . isEmpty ( ) ) {
                partnerConfig . setProxy ( super . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_USE_PROXY_URL ) , super . workUnitState . getPropAsInt ( ConfigurationKeys . SOURCE_CONN_USE_PROXY_PORT ) ) ;
            }
            // Prefer an existing OAuth access token; connect lazily to obtain one.
            String accessToken = sfConnector . getAccessToken ( ) ;
            if ( accessToken == null ) {
                boolean isConnectSuccess = sfConnector . connect ( ) ;
                if ( isConnectSuccess ) {
                    accessToken = sfConnector . getAccessToken ( ) ;
                }
            }
            if ( accessToken != null ) {
                // Token-based session: point the partner config at the instance URL.
                String serviceEndpoint = sfConnector . getInstanceUrl ( ) + SALESFORCE_SOAP_SERVICE + "/" + apiVersion ;
                partnerConfig . setSessionId ( accessToken ) ;
                partnerConfig . setServiceEndpoint ( serviceEndpoint ) ;
            } else {
                // Fallback: classic username + password + security-token login.
                String securityToken = this . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_SECURITY_TOKEN ) ;
                String password = PasswordManager . getInstance ( this . workUnitState ) . readPassword ( this . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_PASSWORD ) ) ;
                partnerConfig . setUsername ( this . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_USERNAME ) ) ;
                partnerConfig . setPassword ( password + securityToken ) ;
            }
            partnerConfig . setAuthEndpoint ( soapAuthEndPoint ) ;
            // Creating the PartnerConnection performs the login and fills in the
            // session id / service endpoint on partnerConfig as a side effect.
            new PartnerConnection ( partnerConfig ) ;
            String soapEndpoint = partnerConfig . getServiceEndpoint ( ) ;
            // Rewrite ".../Soap/..." into the async (bulk) REST endpoint for this API version.
            String restEndpoint = soapEndpoint . substring ( 0 , soapEndpoint . indexOf ( "Soap/" ) ) + "async/" + apiVersion ;
            ConnectorConfig config = new ConnectorConfig ( ) ;
            config . setSessionId ( partnerConfig . getSessionId ( ) ) ;
            config . setRestEndpoint ( restEndpoint ) ;
            config . setCompression ( true ) ;
            config . setTraceFile ( "traceLogs.txt" ) ;
            config . setTraceMessage ( false ) ;
            config . setPrettyPrintXml ( true ) ;
            // The bulk connection honours the same optional proxy settings.
            if ( super . workUnitState . contains ( ConfigurationKeys . SOURCE_CONN_USE_PROXY_URL ) && ! super . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_USE_PROXY_URL ) . isEmpty ( ) ) {
                config . setProxy ( super . workUnitState . getProp ( ConfigurationKeys . SOURCE_CONN_USE_PROXY_URL ) , super . workUnitState . getPropAsInt ( ConfigurationKeys . SOURCE_CONN_USE_PROXY_PORT ) ) ;
            }
            this . bulkConnection = new BulkConnection ( config ) ;
            success = true ;
        } catch ( RuntimeException e ) {
            throw new RuntimeException ( "Failed to connect to salesforce bulk api; error - " + e , e ) ;
        }
        return success ;
    }
}
public class SerialPort {
    /**
     * Get the default serial port for the specified platform/model/board revision.
     *
     * @param board hardware board type
     * @return com port device path
     * @throws UnsupportedBoardType if no default serial port is known for the board
     */
    public static String getDefaultPort ( SystemInfo . BoardType board ) throws UnsupportedBoardType {
        switch ( board ) {
            // ALL RASPBERRY PI MODELS
            // (except Model 3B)
            case RaspberryPi_A :
            case RaspberryPi_B_Rev1 :
            case RaspberryPi_B_Rev2 :
            case RaspberryPi_A_Plus :
            case RaspberryPi_B_Plus :
            case RaspberryPi_ComputeModule :
            case RaspberryPi_2B :
            case RaspberryPi_Zero :
            case RaspberryPi_ComputeModule3 :
            case RaspberryPi_ZeroW :
            case RaspberryPi_Alpha :
            case RaspberryPi_Unknown : {
                return RaspberryPiSerial . DEFAULT_COM_PORT ;
            }
            // RASPBERRY PI MODEL 3B, 3B+
            case RaspberryPi_3B :
            case RaspberryPi_3B_Plus : {
                // if the /dev/ttyS0 port exists, then use it as the default serial port
                // (on the 3B the primary UART is remapped to the mini UART at ttyS0).
                File s0ComPort = new File ( RaspberryPiSerial . S0_COM_PORT ) ;
                if ( ( s0ComPort . exists ( ) ) ) {
                    return RaspberryPiSerial . S0_COM_PORT ;
                }
                return RaspberryPiSerial . DEFAULT_COM_PORT ;
            }
            // BANANAPI and BANANAPRO
            case BananaPi : {
                return BananaPiSerial . DEFAULT_COM_PORT ;
            }
            case BananaPro : {
                return BananaProSerial . DEFAULT_COM_PORT ;
            }
            // BPI
            // TODO: Implement serial for BPI boards
            case Bpi_M1 : break ;
            case Bpi_M1P : break ;
            case Bpi_M2 : break ;
            case Bpi_M2M : break ;
            case Bpi_M2P : break ;
            case Bpi_M2P_H2_Plus : break ;
            case Bpi_M2P_H5 : break ;
            case Bpi_M2U : break ;
            case Bpi_M2U_V40 : break ;
            case Bpi_M3 : break ;
            case Bpi_M64 : break ;
            case Bpi_R1 : break ;
            // NANOPI
            // TODO: Implement serial for NanoPi boards
            case NanoPi_A64 : break ;
            case NanoPi_K2 : break ;
            case NanoPi_M1 : break ;
            case NanoPi_M1_Plus : break ;
            case NanoPi_M3 : break ;
            case NanoPi_NEO : break ;
            case NanoPi_NEO2 : break ;
            case NanoPi_NEO2_Plus : break ;
            case NanoPi_NEO_Air : break ;
            case NanoPi_S2 : break ;
            // ODROID
            // TODO: Implement serial for Odroid boards
            case Odroid : break ;
            // ORANGEPI
            // TODO: Implement serial for OrangePi boards
            case OrangePi : break ;
            // UNKNOWN
            case UNKNOWN : break ;
            default : break ;
        }
        // unknown board type: all fall-through cases end up here
        throw new UnsupportedBoardType ( ) ;
    }
}
public class UnixCrypt { /** * * Check that enteredPassword encrypts to * encryptedPassword .
* @ param encryptedPassword The encryptedPassword . The first two characters are assumed to be the
* salt . This string would be the same as one found in a Unix / etc / passwd file .
* @ param enteredPassword The password as entered by the user ( or otherwise aquired ) .
* @ return true if the password should be considered correct . */
public final static boolean matches ( String encryptedPassword , String enteredPassword ) { } } | String salt = encryptedPassword . substring ( 0 , 3 ) ; String newCrypt = crypt ( salt , enteredPassword ) ; return newCrypt . equals ( encryptedPassword ) ; |
public class APIAStubFactory {
    /**
     * Method to rewrite the default API-A base URL (specified in the service locator class
     * FedoraAPIAServiceLocator). In this case we allow the protocol, host, and port parts of
     * the service URL to be replaced. A SOAP stub will be returned with the desired service
     * endpoint URL.
     *
     * @param protocol "http" or "https" (anything else is rejected)
     * @param host service host name
     * @param port service port
     * @param username basic-auth user
     * @param password basic-auth password
     * @return FedoraAPIA SOAP stub
     * @throws MalformedURLException
     * @throws ServiceException if the protocol is unsupported
     */
    public static FedoraAPIA getStub ( String protocol , String host , int port , String username , String password ) throws MalformedURLException , ServiceException {
        if ( ! "http" . equalsIgnoreCase ( protocol ) && ! "https" . equalsIgnoreCase ( protocol ) ) {
            throw new javax . xml . rpc . ServiceException ( "The protocol" + " " + protocol + " is not supported by this service." ) ;
        }
        Map < String , Object > props = new HashMap < String , Object > ( ) ;
        props . put ( "mtom-enabled" , Boolean . FALSE ) ;
        // Build a CXF JAX-WS client proxy pointing at the rewritten endpoint.
        org . apache . cxf . jaxws . JaxWsProxyFactoryBean clientFactory = new org . apache . cxf . jaxws . JaxWsProxyFactoryBean ( ) ;
        clientFactory . setAddress ( protocol + "://" + host + ":" + port + "/fedora/services/access" ) ;
        clientFactory . setServiceClass ( FedoraAPIA . class ) ;
        clientFactory . setUsername ( username ) ;
        clientFactory . setPassword ( password ) ;
        clientFactory . setProperties ( props ) ;
        // LoggingInInterceptor log1 = new LoggingInInterceptor ( new PrintWriter ( System . out ) ) ;
        // LoggingOutInterceptor log2 = new LoggingOutInterceptor ( new PrintWriter ( System . out ) ) ;
        // clientFactory . getInInterceptors ( ) . add ( log1 ) ;
        // clientFactory . getInInterceptors ( ) . add ( log2 ) ;
        // Temporarily redirect stderr to stdout while CXF creates the proxy
        // (suppresses noise on the error stream), then restore it.
        PrintStream syserr = System . err ;
        System . setErr ( System . out ) ;
        FedoraAPIA service = ( FedoraAPIA ) clientFactory . create ( ) ;
        System . setErr ( syserr ) ;
        syserr = null ;
        if ( Administrator . INSTANCE == null ) {
            // if running without Administrator, don't wrap it with the statusbar stuff
            return service ;
        } else {
            return new APIAStubWrapper ( service ) ;
        }
    }
}
public class MessagingSecurityServiceImpl { /** * Create the User / Group set for a particular Role
* @ param properties
* @ param type
* " User " or " Group "
* @ return
* Set of User / Group */
private Set < String > createUserOrGroupSet ( Dictionary < String , Object > properties , String type ) { } } | SibTr . entry ( tc , CLASS_NAME + "createUserOrGroupSet" , new Object [ ] { properties , type } ) ; Set < String > userOrGroupSet = new HashSet < String > ( ) ; if ( properties != null ) { String [ ] tempUsersOrGroups = ( String [ ] ) properties . get ( type ) ; if ( tempUsersOrGroups != null ) { for ( String tempUserOrGroup : tempUsersOrGroups ) { Dictionary < String , Object > userOrGroupProperties = getDictionaryObject ( tempUserOrGroup ) ; if ( userOrGroupProperties != null ) { String userOrGroup = ( ( String ) userOrGroupProperties . get ( MessagingSecurityConstants . NAME ) ) ; userOrGroupSet . add ( userOrGroup . trim ( ) ) ; } } } } SibTr . exit ( tc , CLASS_NAME + "createUserOrGroupSet" , userOrGroupSet ) ; return userOrGroupSet ; |
public class JcrTools {
    /**
     * Upload the content in the supplied stream into the repository at the defined path, using
     * the given session. This method will create a 'nt:file' node at the supplied path, and any
     * non-existent ancestors with nodes of type 'nt:folder'. As defined by the JCR specification,
     * the binary content (and other properties) will be placed on a child of the 'nt:file' node
     * named 'jcr:content' with a node type of 'nt:resource'.
     * This method always closes the supplied stream.
     *
     * @param session the JCR session
     * @param path the path to the file
     * @param stream the stream containing the content to be uploaded
     * @return the newly created 'nt:file' node
     * @throws RepositoryException if there is a problem uploading the file
     * @throws IOException if there is a problem using the stream
     * @throws IllegalArgumentException is any of the parameters are null
     */
    public Node uploadFile ( Session session , String path , InputStream stream ) throws RepositoryException , IOException {
        isNotNull ( session , "session" ) ;
        isNotNull ( path , "path" ) ;
        isNotNull ( stream , "stream" ) ;
        Node fileNode = null ;
        boolean error = false ;
        try {
            // Create an 'nt:file' node at the supplied path, creating any missing intermediate nodes of type 'nt:folder'...
            fileNode = findOrCreateNode ( session . getRootNode ( ) , path , "nt:folder" , "nt:file" ) ;
            // Upload the file to that node...
            Node contentNode = findOrCreateChild ( fileNode , "jcr:content" , "nt:resource" ) ;
            Binary binary = session . getValueFactory ( ) . createBinary ( stream ) ;
            contentNode . setProperty ( "jcr:data" , binary ) ;
        } catch ( RepositoryException e ) {
            // Track that an error already happened so close() failures below don't mask it.
            error = true ;
            throw e ;
        } catch ( RuntimeException e ) {
            error = true ;
            throw e ;
        } finally {
            try {
                stream . close ( ) ;
            } catch ( RuntimeException e ) {
                if ( ! error ) throw e ; // don't override any exception thrown in the block above
            }
        }
        return fileNode ;
    }
}
public class OpenIdServiceResponseBuilder { /** * Determine identity .
* @ param service the service
* @ param assertion the assertion
* @ return the string */
protected String determineIdentity ( final OpenIdService service , final Assertion assertion ) { } } | if ( assertion != null && OpenIdProtocolConstants . OPENID_IDENTIFIERSELECT . equals ( service . getIdentity ( ) ) ) { return this . openIdPrefixUrl + '/' + assertion . getPrimaryAuthentication ( ) . getPrincipal ( ) . getId ( ) ; } return service . getIdentity ( ) ; |
public class CommsUtils { /** * Determines whether a message is recoverable compared to the supplied maxUnrecoverableReliability .
* @ param mess the message to check .
* @ para maxUnrecoverableReliability the most reliable reliability that is considered unrecoverable in the context in which this method is executed .
* @ return true for any message which is more recoverable than maxUnrecoverableReliability , otherwise a false is returned . */
public static boolean isRecoverable ( final SIBusMessage mess , final Reliability maxUnrecoverableReliability ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "isRecoverable" , new Object [ ] { mess , maxUnrecoverableReliability } ) ; final Reliability messageReliability = mess . getReliability ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Message Reliability: " , messageReliability ) ; final boolean recoverable = messageReliability . compareTo ( maxUnrecoverableReliability ) > 0 ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "isRecoverable" , recoverable ) ; return recoverable ; |
public class AbstractRemoteClient {
    /**
     * {@inheritDoc}
     *
     * @throws InterruptedException {@inheritDoc}
     * @throws CouldNotPerformException {@inheritDoc}
     */
    @ Override public void deactivate ( ) throws InterruptedException , CouldNotPerformException {
        synchronized ( maintainerLock ) {
            try {
                verifyMaintainability ( ) ;
                validateInitialization ( ) ;
            } catch ( InvalidStateException ex ) {
                // was never initialized!
                return ;
            }
            // Skip the disconnect bookkeeping while a reconnect is in progress.
            if ( connectionState != RECONNECTING ) {
                skipSyncTasks ( ) ;
                setConnectionState ( DISCONNECTED ) ;
                // Cancel any in-flight ping so it does not fire after deactivation.
                if ( pingTask != null && ! pingTask . isDone ( ) ) {
                    pingTask . cancel ( true ) ;
                }
            }
            if ( listenerWatchDog != null ) {
                listenerWatchDog . deactivate ( ) ;
            }
            if ( remoteServerWatchDog != null ) {
                remoteServerWatchDog . deactivate ( ) ;
            }
        }
        // Wake up any threads blocked waiting on a connection-state change; done outside
        // the maintainer lock to avoid holding both monitors at once.
        synchronized ( connectionMonitor ) {
            connectionMonitor . notifyAll ( ) ;
        }
    }
}
public class RtfColor { /** * Writes the beginning of this RtfColor */
public void writeBegin ( final OutputStream result ) { } } | try { result . write ( COLOR_NUMBER ) ; result . write ( intToByteArray ( colorNumber ) ) ; } catch ( IOException ioe ) { ioe . printStackTrace ( ) ; } |
public class CreateIndexRequest { /** * Specifies the attributes that should be indexed on . Currently only a single attribute is supported .
* @ param orderedIndexedAttributeList
* Specifies the attributes that should be indexed on . Currently only a single attribute is supported . */
public void setOrderedIndexedAttributeList ( java . util . Collection < AttributeKey > orderedIndexedAttributeList ) { } } | if ( orderedIndexedAttributeList == null ) { this . orderedIndexedAttributeList = null ; return ; } this . orderedIndexedAttributeList = new java . util . ArrayList < AttributeKey > ( orderedIndexedAttributeList ) ; |
public class BaseDatasourceFactory { /** * Get and populate the pooled data source object for this database .
* @ param database The JDBC database to create a connection to .
* @ return The pooled datasource . */
public DataSource getFakePooledDataSource ( JdbcDatabase database ) { } } | ComboPooledDataSource dataSource = new ComboPooledDataSource ( ) ; this . setDatasourceParams ( database , dataSource ) ; return dataSource ; |
public class PagingPredicate { /** * Used if inner predicate is instanceof { @ link IndexAwarePredicate } for checking if indexed .
* @ param queryContext
* @ return */
@ Override public boolean isIndexed ( QueryContext queryContext ) { } } | if ( predicate instanceof IndexAwarePredicate ) { return ( ( IndexAwarePredicate ) predicate ) . isIndexed ( queryContext ) ; } return false ; |
public class GenericGenerators {
    /**
     * Calls a method with a set of arguments. After execution the stack may have an extra item
     * pushed on it: the object that was returned by this method (if any).
     *
     * @param internalClassName name of class that {@code method} belongs to (this is the internal
     *        Java name where dots are replaced with slashes)
     * @param method method node that describes the method to call
     * @param args method argument instruction lists -- each instruction list must leave one item
     *        on the stack of the type expected by the method (note that if this is a non-static
     *        method, the first argument must always evaluate to the "this" pointer/reference)
     * @return instructions to invoke a method
     * @throws NullPointerException if any argument is {@code null} or array contains {@code null}
     * @throws IllegalArgumentException if the length of {@code args} doesn't match the number of
     *         parameters in {@code method}
     */
    public static InsnList call ( String internalClassName , MethodNode method , InsnList ... args ) {
        Validate . notNull ( internalClassName ) ;
        Validate . notNull ( method ) ;
        Validate . notNull ( args ) ;
        Validate . noNullElements ( args ) ;
        // Emit the argument-producing instruction lists in order, so the operands
        // (including "this" for instance calls) are on the stack before the invoke.
        InsnList ret = new InsnList ( ) ;
        for ( InsnList arg : args ) {
            ret . add ( arg ) ;
        }
        Type [ ] argTypes = Type . getMethodType ( method . desc ) . getArgumentTypes ( ) ;
        if ( ( method . access & Opcodes . ACC_STATIC ) == Opcodes . ACC_STATIC ) {
            // Static call: arg count must equal the descriptor's parameter count.
            Validate . isTrue ( argTypes . length == args . length ) ;
            ret . add ( new MethodInsnNode ( Opcodes . INVOKESTATIC , internalClassName , method . name , method . desc , false ) ) ;
        } else if ( ( method . access & Opcodes . ACC_INTERFACE ) == Opcodes . ACC_INTERFACE ) {
            // NOTE(review): ACC_INTERFACE is a class-level access flag in the JVM spec,
            // not a method flag — confirm that callers deliberately set this bit on
            // MethodNode.access for interface methods, otherwise this branch is dead.
            Validate . isTrue ( argTypes . length + 1 == args . length ) ;
            ret . add ( new MethodInsnNode ( Opcodes . INVOKEINTERFACE , internalClassName , method . name , method . desc , true ) ) ;
        } else {
            // Instance call: one extra argument for the receiver ("this").
            Validate . isTrue ( argTypes . length + 1 == args . length ) ;
            ret . add ( new MethodInsnNode ( Opcodes . INVOKEVIRTUAL , internalClassName , method . name , method . desc , false ) ) ;
        }
        return ret ;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.