signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class CSSReaderDeclarationList { /** * Set the default CSS parse exception handler ( for unrecoverable errors ) .
* @ param aDefaultParseExceptionHandler
* The new default exception handler to be used . May not be
* < code > null < / code > .
* @ since 3.7.4 */
public static void setDefaultParseExceptionHandler ( @ Nonnull final ICSSParseExceptionCallback aDefaultParseExceptionHandler ) { } }
|
ValueEnforcer . notNull ( aDefaultParseExceptionHandler , "DefaultParseExceptionHandler" ) ; s_aRWLock . writeLocked ( ( ) -> s_aDefaultParseExceptionHandler = aDefaultParseExceptionHandler ) ;
|
public class TypeMaker { /** * Convert a list of javac types into an array of javadoc types . */
public static com . sun . javadoc . Type [ ] getTypes ( DocEnv env , List < Type > ts ) { } }
|
return getTypes ( env , ts , new com . sun . javadoc . Type [ ts . length ( ) ] ) ;
|
public class GetRoutesResult { /** * The elements from this collection .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setItems ( java . util . Collection ) } or { @ link # withItems ( java . util . Collection ) } if you want to override the
* existing values .
* @ param items
* The elements from this collection .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetRoutesResult withItems ( Route ... items ) { } }
|
if ( this . items == null ) { setItems ( new java . util . ArrayList < Route > ( items . length ) ) ; } for ( Route ele : items ) { this . items . add ( ele ) ; } return this ;
|
public class HttpHeaders { /** * Return the list of acceptable { @ linkplain ContentCodingType coding types } , as
* specified by the { @ code Accept - Encoding } header .
* < p > Returns an empty list when the acceptable content coding types are unspecified .
* @ return the acceptable content coding types */
public List < ContentCodingType > getAcceptEncoding ( ) { } }
|
String value = getFirst ( ACCEPT_ENCODING ) ; return ( value != null ? ContentCodingType . parseCodingTypes ( value ) : Collections . < ContentCodingType > emptyList ( ) ) ;
|
public class CmsCategoryService { /** * Adds all categories from one resource to another , skipping categories that are not available for the resource copied to .
* The resource where categories are copied to has to be locked .
* @ param cms the CmsObject used for reading and writing .
* @ param fromResource the resource to copy the categories from .
* @ param toResourceSitePath the full site path of the resource to copy the categories to .
* @ throws CmsException thrown if copying the resources fails . */
public void copyCategories ( CmsObject cms , CmsResource fromResource , String toResourceSitePath ) throws CmsException { } }
|
List < CmsCategory > categories = readResourceCategories ( cms , fromResource ) ; for ( CmsCategory category : categories ) { addResourceToCategory ( cms , toResourceSitePath , category ) ; }
|
public class CSVUtil { /** * Load the data from CSV .
* @ param csvFile
* @ param offset
* @ param count
* @ param filter
* @ param columnTypeMap
* @ return */
@ SuppressWarnings ( "rawtypes" ) public static < E extends Exception > DataSet loadCSV ( final File csvFile , final long offset , final long count , final Try . Predicate < String [ ] , E > filter , final Map < String , ? extends Type > columnTypeMap ) throws UncheckedIOException , E { } }
|
InputStream csvInputStream = null ; try { csvInputStream = new FileInputStream ( csvFile ) ; return loadCSV ( csvInputStream , offset , count , filter , columnTypeMap ) ; } catch ( IOException e ) { throw new UncheckedIOException ( e ) ; } finally { IOUtil . closeQuietly ( csvInputStream ) ; }
|
public class GraphvizInjectionPlanVisitor { /** * Produce a Graphviz DOT string for a given TANG injection plan .
* @ param injectionPlan TANG injection plan .
* @ param showLegend if true , show legend on the graph .
* @ return Injection plan represented as a string in Graphviz DOT format . */
public static String getGraphvizString ( final InjectionPlan < ? > injectionPlan , final boolean showLegend ) { } }
|
final GraphvizInjectionPlanVisitor visitor = new GraphvizInjectionPlanVisitor ( showLegend ) ; Walk . preorder ( visitor , visitor , injectionPlan ) ; return visitor . toString ( ) ;
|
public class OptimizedSIXAResourceProxy { /** * @ return Returns the lowest message priority of either the joined resource ( if we are joined
* to one ) , or us if we are not .
* @ see com . ibm . ws . sib . comms . client . Transaction # getLowestMessagePriority ( ) */
public short getLowestMessagePriority ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getLowestMessagePriority" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( this , tc , "Joined resource:" , joinedResource ) ; final short result ; if ( joinedResource != null ) { result = joinedResource . getLowestMessagePriority ( ) ; } else { result = super . getLowestMessagePriority ( ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getLowestMessagePriority" , result ) ; return result ;
|
public class ModifyVpcEndpointRequest { /** * ( Gateway endpoint ) One or more route tables IDs to associate with the endpoint .
* @ param addRouteTableIds
* ( Gateway endpoint ) One or more route tables IDs to associate with the endpoint . */
public void setAddRouteTableIds ( java . util . Collection < String > addRouteTableIds ) { } }
|
if ( addRouteTableIds == null ) { this . addRouteTableIds = null ; return ; } this . addRouteTableIds = new com . amazonaws . internal . SdkInternalList < String > ( addRouteTableIds ) ;
|
public class Capsule3d { /** * Set the second point of the capsule ' s segment .
* @ param point the new second point for the capsule ' s segment . . */
public void setMedial2Properties ( Point3d point ) { } }
|
this . medial2 . setProperties ( point . xProperty , point . yProperty , point . zProperty ) ; ensureAIsLowerPoint ( ) ;
|
public class PojoWrapper { /** * / * ( non - Javadoc )
* @ see groovy . lang . GroovyObject # setProperty ( java . lang . String , java . lang . Object ) */
public void setProperty ( final String property , final Object newValue ) { } }
|
this . delegate . setProperty ( this . wrapped , property , newValue ) ;
|
public class MVELDialectRuntimeData { /** * TODO : FIXME : make it consistent with above */
public void removeFunction ( KnowledgePackageImpl pkg , org . drools . core . rule . Function function ) { } }
|
this . functionFactory . removeFunction ( function . getName ( ) ) ;
|
public class HBCIInstitute { /** * Aktualisiert die BPD bei Bedarf . */
void fetchBPDAnonymous ( ) { } }
|
// BPD abholen , wenn nicht vorhanden oder HBCI - Version geaendert
Map < String , String > bpd = passport . getBPD ( ) ; String hbciVersionOfBPD = ( bpd != null ) ? bpd . get ( BPD_KEY_HBCIVERSION ) : null ; final String version = passport . getBPDVersion ( ) ; if ( version . equals ( "0" ) || isBPDExpired ( ) || hbciVersionOfBPD == null || ! hbciVersionOfBPD . equals ( passport . getHBCIVersion ( ) ) ) { try { // Wenn wir die BPD per anonymem Dialog neu abrufen , muessen wir sicherstellen ,
// dass die BPD - Version im Passport auf " 0 " zurueckgesetzt ist . Denn wenn die
// Bank den anonymen Abruf nicht unterstuetzt , wuerde dieser Abruf hier fehlschlagen ,
// der erneute Versuch mit authentifiziertem Dialog wuerde jedoch nicht zum
// Neuabruf der BPD fuehren , da dort ( in HBCIUser # fetchUPD bzw . HBCIDialog # doDialogInit )
// weiterhin die ( u . U . ja noch aktuelle ) BPD - Version an die Bank geschickt wird
// und diese daraufhin keine neuen BPD schickt . Das wuerde in einer endlosen
// Schleife enden , in der wir hier immer wieder versuchen wuerden , neu abzurufen
// ( weil expired ) . Siehe https : / / www . willuhn . de / bugzilla / show _ bug . cgi ? id = 1567
// Also muessen wir die BPD - Version auf 0 setzen . Fuer den Fall , dass wir in dem
// " if " hier aus einem der anderen beiden o . g . Gruende ( BPD - Expiry oder neue HBCI - Version )
// gelandet sind .
if ( ! version . equals ( "0" ) ) { log . info ( "resetting BPD version from " + version + " to 0" ) ; passport . getBPD ( ) . put ( "BPA.version" , "0" ) ; } passport . getCallback ( ) . status ( HBCICallback . STATUS_INST_BPD_INIT , null ) ; log . info ( "fetching BPD" ) ; HBCIMsgStatus msgStatus = anonymousDialogInit ( ) ; updateBPD ( msgStatus . getData ( ) ) ; if ( ! msgStatus . isDialogClosed ( ) ) { anonymousDialogEnd ( msgStatus . getData ( ) ) ; } if ( ! msgStatus . isOK ( ) ) { log . error ( "fetching BPD failed" ) ; throw new ProcessException ( HBCIUtils . getLocMsg ( "ERR_INST_BPDFAILED" ) , msgStatus ) ; } } catch ( HBCI_Exception e ) { if ( e . isFatal ( ) ) throw e ; } catch ( Exception e ) { // Viele Kreditinstitute unterstützen den anonymen Login nicht . Dass sollte nicht als Fehler den
// Anwender beunruhigen
log . info ( "FAILED! - maybe this institute does not support anonymous logins" ) ; log . info ( "we will nevertheless go on" ) ; } } // ueberpruefen , ob angeforderte sicherheitsmethode auch
// tatsaechlich unterstuetzt wird
log . debug ( "checking if requested hbci parameters are supported" ) ; if ( passport . getBPD ( ) != null ) { if ( ! Arrays . asList ( passport . getSuppVersions ( ) ) . contains ( passport . getHBCIVersion ( ) ) ) { String msg = HBCIUtils . getLocMsg ( "EXCMSG_VERSIONNOTSUPP" ) ; throw new InvalidUserDataException ( msg ) ; } } else { log . warn ( "can not check if requested parameters are supported" ) ; }
|
public class UserStub { /** * Get element by index
* @ param n Index
* @ return Element corresponding to index */
public Object productElement ( int n ) { } }
|
assert n >= 0 && n < 3 ; switch ( n ) { case 0 : return dataType ; case 1 : return geographicalLocation ; case 2 : return isSecret ; default : throw new IllegalArgumentException ( n + " is not an allowed index into UserStub!" ) ; }
|
public class DescribeIndexFieldsResult { /** * The index fields configured for the domain .
* @ param indexFields
* The index fields configured for the domain . */
public void setIndexFields ( java . util . Collection < IndexFieldStatus > indexFields ) { } }
|
if ( indexFields == null ) { this . indexFields = null ; return ; } this . indexFields = new com . amazonaws . internal . SdkInternalList < IndexFieldStatus > ( indexFields ) ;
|
public class IndexDefinition { /** * Gets the .
* @ return the list < index definition >
* @ throws EFapsException on error */
public static List < IndexDefinition > get ( ) throws EFapsException { } }
|
final List < IndexDefinition > ret = new ArrayList < > ( ) ; final QueryBuilder queryBldr = new QueryBuilder ( CIAdminIndex . IndexDefinition ) ; final MultiPrintQuery multi = queryBldr . getPrint ( ) ; final SelectBuilder selUUID = SelectBuilder . get ( ) . linkto ( CIAdminIndex . IndexDefinition . TypeLink ) . attribute ( CIAdminDataModel . Type . UUID ) ; multi . addSelect ( selUUID ) ; multi . execute ( ) ; while ( multi . next ( ) ) { final UUID uuidTmp = UUID . fromString ( multi . getSelect ( selUUID ) ) ; final Set < Type > types = getChildTypes ( Type . get ( uuidTmp ) ) ; for ( final Type type : types ) { final IndexDefinition indexDef = IndexDefinition . get ( type . getUUID ( ) ) ; if ( ! type . isAbstract ( ) ) { ret . add ( indexDef ) ; } } } return ret ;
|
public class PathComputer { /** * Computes the directory where the atlas resides given a command - line
* argument provided by the user . If the argument is not given , then
* this method should be called with a null argument .
* @ param name Path given by user at the command - line to refer to the atlas
* @ return Directory where atlas resides , based on the given argument */
static public File computeAtlasDir ( String name ) { } }
|
File atlasDir = null ; if ( null == name ) { // Current dir
atlasDir = new File ( "." ) ; } else { atlasDir = new File ( name ) ; } // Force absolute
if ( false == atlasDir . isAbsolute ( ) ) { atlasDir = atlasDir . getAbsoluteFile ( ) ; } return atlasDir ;
|
public class TimeBoundedStreamJoin { /** * Register a timer for cleaning up rows in a specified time .
* @ param ctx the context to register timer
* @ param rowTime time for the input row
* @ param leftRow whether this row comes from the left stream */
private void registerCleanUpTimer ( Context ctx , long rowTime , boolean leftRow ) throws IOException { } }
|
if ( leftRow ) { long cleanUpTime = rowTime + leftRelativeSize + minCleanUpInterval + allowedLateness + 1 ; registerTimer ( ctx , cleanUpTime ) ; rightTimerState . update ( cleanUpTime ) ; } else { long cleanUpTime = rowTime + rightRelativeSize + minCleanUpInterval + allowedLateness + 1 ; registerTimer ( ctx , cleanUpTime ) ; leftTimerState . update ( cleanUpTime ) ; }
|
public class ManagedCloudSdk { /** * For " LATEST " version SDKs , the client tooling must keep the SDK up - to - date manually , check with
* { @ link # isUpToDate ( ) } before using , returns a new updater if sdk is " LATEST " , it will throw a
* { @ link UnsupportedOperationException } if SDK is a fixed version ( fixed versions should never be
* udpated ) . */
public SdkUpdater newUpdater ( ) { } }
|
if ( version != Version . LATEST ) { throw new UnsupportedOperationException ( "Cannot update a fixed version SDK." ) ; } return SdkUpdater . newUpdater ( osInfo . name ( ) , getGcloudPath ( ) ) ;
|
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public GSLELINEEND createGSLELINEENDFromString ( EDataType eDataType , String initialValue ) { } }
|
GSLELINEEND result = GSLELINEEND . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link CoverageFunctionType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link CoverageFunctionType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "coverageFunction" ) public JAXBElement < CoverageFunctionType > createCoverageFunction ( CoverageFunctionType value ) { } }
|
return new JAXBElement < CoverageFunctionType > ( _CoverageFunction_QNAME , CoverageFunctionType . class , null , value ) ;
|
public class SenseDataDescriptor { /** * Serializes the fields common to all sense data descriptors .
* @ param byteBuffer where the serialized fields will be stored
* @ param index the position of the first byte of the sense data descriptor in the { @ link ByteBuffer } */
private final void serializeCommonFields ( final ByteBuffer byteBuffer , final int index ) { } }
|
byteBuffer . position ( index ) ; byteBuffer . put ( descriptorType . getValue ( ) ) ; byteBuffer . put ( ( byte ) additionalLength ) ;
|
public class H2StreamProcessor { /** * Check to see if a writing out a frame will cause the stream or connection window to go exceeded
* @ return true if the write window would be exceeded by writing the frame */
private boolean isWindowLimitExceeded ( FrameData dataFrame ) { } }
|
if ( streamWindowUpdateWriteLimit - dataFrame . getPayloadLength ( ) < 0 || muxLink . getWorkQ ( ) . getConnectionWriteLimit ( ) - dataFrame . getPayloadLength ( ) < 0 ) { // would exceed window update limit
String s = "Cannot write Data Frame because it would exceed the stream window update limit." + "streamWindowUpdateWriteLimit: " + streamWindowUpdateWriteLimit + "\nstreamWindowUpdateWriteInitialSize: " + streamWindowUpdateWriteInitialSize + "\nconnection window size: " + muxLink . getWorkQ ( ) . getConnectionWriteLimit ( ) + "\nframe size: " + dataFrame . getPayloadLength ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , s ) ; } return true ; } return false ;
|
public class CmsGallerySearchResult { /** * Returns the list of Solr fields a search result must have to initialize the gallery search result correctly .
* @ return the list of Solr fields . */
public static final String [ ] getRequiredSolrFields ( ) { } }
|
if ( null == m_requiredSolrFields ) { List < Locale > locales = OpenCms . getLocaleManager ( ) . getAvailableLocales ( ) ; m_requiredSolrFields = new String [ 14 + ( locales . size ( ) * 6 ) ] ; int count = 0 ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_PATH ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_TYPE ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_DATE_CREATED ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_DATE_LASTMODIFIED ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_DATE_EXPIRED ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_DATE_RELEASED ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_SIZE ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_STATE ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_USER_CREATED ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_ID ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_USER_LAST_MODIFIED ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_ADDITIONAL_INFO ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_CONTAINER_TYPES ; m_requiredSolrFields [ count ++ ] = CmsSearchField . FIELD_RESOURCE_LOCALES ; for ( Locale locale : locales ) { m_requiredSolrFields [ count ++ ] = CmsSearchFieldConfiguration . getLocaleExtendedName ( CmsSearchField . FIELD_TITLE_UNSTORED , locale . toString ( ) ) + "_s" ; m_requiredSolrFields [ count ++ ] = CmsSearchFieldConfiguration . getLocaleExtendedName ( CmsPropertyDefinition . PROPERTY_TITLE , locale . toString ( ) ) + CmsSearchField . FIELD_DYNAMIC_PROPERTIES_DIRECT + "_s" ; m_requiredSolrFields [ count ++ ] = CmsPropertyDefinition . PROPERTY_TITLE + CmsSearchField . FIELD_DYNAMIC_PROPERTIES_DIRECT + "_s" ; m_requiredSolrFields [ count ++ ] = CmsSearchFieldConfiguration . getLocaleExtendedName ( CmsSearchField . FIELD_DESCRIPTION , locale . 
toString ( ) ) + "_s" ; m_requiredSolrFields [ count ++ ] = CmsSearchFieldConfiguration . getLocaleExtendedName ( CmsPropertyDefinition . PROPERTY_DESCRIPTION , locale . toString ( ) ) + CmsSearchField . FIELD_DYNAMIC_PROPERTIES + "_s" ; m_requiredSolrFields [ count ++ ] = CmsPropertyDefinition . PROPERTY_DESCRIPTION + CmsSearchField . FIELD_DYNAMIC_PROPERTIES + "_s" ; } } return m_requiredSolrFields ;
|
public class Log { /** * Send an { @ link # Constants . ERROR } log message .
* @ param tag
* Used to identify the source of a log message . It usually
* identifies the class or activity where the log call occurs .
* @ param msg
* The message you would like logged . */
public static int e ( String tag , String msg ) { } }
|
collectLogEntry ( Constants . ERROR , tag , msg , null ) ; if ( isLoggable ( tag , Constants . ERROR ) ) { return android . util . Log . e ( tag , msg ) ; } return 0 ;
|
public class CPadawan { /** * Escape the given string to be a valid content for an xml attribute
* @ param in the string to escape
* @ return the escaped string */
public final String escapeAttribute ( String in ) { } }
|
in = escape ( in ) ; in = in . replaceAll ( "\"" , """ ) ; return in ;
|
public class MongoDBClient { /** * Loads columns from multiple rows restricting results to conditions stored
* in < code > filterClauseQueue < / code > .
* @ param < E >
* the element type
* @ param entityMetadata
* the entity metadata
* @ param mongoQuery
* the mongo query
* @ param relationNames
* the relation names
* @ param orderBy
* the order by
* @ param maxResult
* the max result
* @ param firstResult
* the first result
* @ param isCountQuery
* the is count query
* @ param keys
* the keys
* @ param results
* the results
* @ return the list
* @ throws Exception
* the exception */
public < E > List < E > loadData ( EntityMetadata entityMetadata , BasicDBObject mongoQuery , List < String > relationNames , BasicDBObject orderBy , int maxResult , int firstResult , boolean isCountQuery , BasicDBObject keys , String ... results ) throws Exception { } }
|
MetamodelImpl metaModel = ( MetamodelImpl ) kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( entityMetadata . getPersistenceUnit ( ) ) ; AbstractManagedType managedType = ( AbstractManagedType ) metaModel . entity ( entityMetadata . getEntityClazz ( ) ) ; boolean hasLob = managedType . hasLobAttribute ( ) ; return ( List < E > ) ( ! hasLob ? loadQueryData ( entityMetadata , mongoQuery , orderBy , maxResult , firstResult , isCountQuery , keys , results ) : loadQueryDataGFS ( entityMetadata , mongoQuery , orderBy , maxResult , firstResult , isCountQuery ) ) ;
|
public class TrainingsImpl { /** * Associate a set of images with a set of tags .
* @ param projectId The project id
* @ param createImageTagsOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ImageTagCreateSummary object */
public Observable < ImageTagCreateSummary > createImageTagsAsync ( UUID projectId , CreateImageTagsOptionalParameter createImageTagsOptionalParameter ) { } }
|
return createImageTagsWithServiceResponseAsync ( projectId , createImageTagsOptionalParameter ) . map ( new Func1 < ServiceResponse < ImageTagCreateSummary > , ImageTagCreateSummary > ( ) { @ Override public ImageTagCreateSummary call ( ServiceResponse < ImageTagCreateSummary > response ) { return response . body ( ) ; } } ) ;
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link Object } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link Object } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/tunnel/2.0" , name = "_GenericApplicationPropertyOfDoor" ) public JAXBElement < Object > create_GenericApplicationPropertyOfDoor ( Object value ) { } }
|
return new JAXBElement < Object > ( __GenericApplicationPropertyOfDoor_QNAME , Object . class , null , value ) ;
|
public class CPTaxCategoryLocalServiceBaseImpl { /** * Updates the cp tax category in the database or adds it if it does not yet exist . Also notifies the appropriate model listeners .
* @ param cpTaxCategory the cp tax category
* @ return the cp tax category that was updated */
@ Indexable ( type = IndexableType . REINDEX ) @ Override public CPTaxCategory updateCPTaxCategory ( CPTaxCategory cpTaxCategory ) { } }
|
return cpTaxCategoryPersistence . update ( cpTaxCategory ) ;
|
public class Specifications { /** * Returns the offset constant to this variable . */
public static String offsetName ( String varName ) { } }
|
return CaseFormat . LOWER_CAMEL . to ( CaseFormat . UPPER_UNDERSCORE , varName ) + "_OFFSET" ;
|
public class RxInstrumentedWrappers { /** * Wrap a observer .
* @ param downstream The downstream observer
* @ param instrumentations The instrumentations
* @ return The wrapped subscriber */
static CompletableObserver wrap ( CompletableObserver downstream , List < RunnableInstrumenter > instrumentations ) { } }
|
return new RxInstrumentedCompletableObserver ( downstream , instrumentations ) ;
|
public class LocalQPConsumerKey { /** * Retrieve the next message using an appropriate cursor .
* if the classification parameter is 0 then the default ( unclassified ) cursor
* will be used .
* @ param classification
* @ return
* @ throws MessageStoreException */
protected SIMPMessage getMessageLocked ( int classification ) throws MessageStoreException { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getMessageLocked" , Integer . valueOf ( classification ) ) ; SIMPMessage msg = null ; if ( ! classifyingMessages ) msg = ( SIMPMessage ) getDefaultGetCursor ( ) . next ( ) ; else msg = ( SIMPMessage ) getGetCursor ( classification ) . next ( ) ; if ( msg != null ) msg . setLocalisingME ( consumerDispatcher . getMessageProcessor ( ) . getMessagingEngineUuid ( ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getMessageLocked" , msg ) ; return msg ;
|
public class TabbedPaneSourceDragAdapter { /** * { @ inheritDoc } */
@ Override public void mouseDragDetected ( final MouseEvent mouseEvent ) { } }
|
final ToggleButton b = ( ToggleButton ) mouseEvent . getSource ( ) ; final Dragboard db = b . startDragAndDrop ( TransferMode . MOVE ) ; // Put a TabBB on a dragboard
final ClipboardContent content = new ClipboardContent ( ) ; content . put ( CustomDataFormat . DOCKABLE , b . getUserData ( ) ) ; db . setContent ( content ) ; mouseEvent . consume ( ) ;
|
public class DroppedFrameDetector { /** * Choreographer . FrameCallback */
@ Override public void doFrame ( long timestampNanoseconds ) { } }
|
long frameIntervalNanoseconds = timestampNanoseconds - lastTimestampNanoseconds ; // To detect a dropped frame , we need to know the interval between two frame callbacks .
// If this is the first , wait for the second .
if ( lastTimestampNanoseconds != NEVER ) { // With no dropped frames , frame intervals will roughly equal the hardware interval .
// 2x the hardware interval means we definitely dropped one frame .
// So our measuring stick is 1.5x .
double droppedFrameIntervalSeconds = hardwareFrameIntervalSeconds * 1.5 ; double frameIntervalSeconds = frameIntervalNanoseconds / 1_000_000_000.0 ; if ( droppedFrameIntervalSeconds < frameIntervalSeconds ) { playTickSound ( ) ; if ( areHapticsEnabled ) { playTickHaptics ( ) ; } } } lastTimestampNanoseconds = timestampNanoseconds ; Choreographer . getInstance ( ) . postFrameCallback ( this ) ;
|
public class ResponseTimeImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . map . api . primitives . MAPAsnPrimitive # encodeData
* ( org . mobicents . protocols . asn . AsnOutputStream ) */
public void encodeData ( AsnOutputStream asnOs ) throws MAPException { } }
|
if ( this . responseTimeCategory == null ) { throw new MAPException ( "Error while encoding " + _PrimitiveName + " the mandatory parameter responseTimeCategory is not defined" ) ; } try { asnOs . writeInteger ( Tag . CLASS_UNIVERSAL , Tag . ENUMERATED , this . responseTimeCategory . getCategory ( ) ) ; } catch ( IOException e ) { throw new MAPException ( "IOException when encoding responseTimeCategory: " , e ) ; } catch ( AsnException e ) { throw new MAPException ( "AsnException when encoding responseTimeCategory: " , e ) ; }
|
public class MutableRoaringBitmap { /** * In - place bitwise AND ( intersection ) operation . The current bitmap is modified .
* @ param array other bitmap */
public void and ( final ImmutableRoaringBitmap array ) { } }
|
int pos1 = 0 , pos2 = 0 , intersectionSize = 0 ; final int length1 = highLowContainer . size ( ) , length2 = array . highLowContainer . size ( ) ; while ( pos1 < length1 && pos2 < length2 ) { final short s1 = highLowContainer . getKeyAtIndex ( pos1 ) ; final short s2 = array . highLowContainer . getKeyAtIndex ( pos2 ) ; if ( s1 == s2 ) { final MappeableContainer c1 = highLowContainer . getContainerAtIndex ( pos1 ) ; final MappeableContainer c2 = array . highLowContainer . getContainerAtIndex ( pos2 ) ; final MappeableContainer c = c1 . iand ( c2 ) ; if ( ! c . isEmpty ( ) ) { getMappeableRoaringArray ( ) . replaceKeyAndContainerAtIndex ( intersectionSize ++ , s1 , c ) ; } ++ pos1 ; ++ pos2 ; } else if ( Util . compareUnsigned ( s1 , s2 ) < 0 ) { // s1 < s2
pos1 = highLowContainer . advanceUntil ( s2 , pos1 ) ; } else { // s1 > s2
pos2 = array . highLowContainer . advanceUntil ( s1 , pos2 ) ; } } getMappeableRoaringArray ( ) . resize ( intersectionSize ) ;
|
public class MessageProcessor { /** * Invokes the appropriate message handler of the passed instance . Caller is responsible for not passing < code > null < / code > messages . */
@ Override public List < KeyedMessageWithType > invoke ( final T instance , final KeyedMessage message ) throws DempsyException { } }
|
if ( ! isMessageSupported ( message . message ) ) throw new IllegalArgumentException ( mpClassName + ": no handler for messages of type: " + message . message . getClass ( ) . getName ( ) ) ; final Object returnValue = wrap ( ( ) -> invocationMethods . invokeMethod ( instance , message . message ) ) ; return returnValue == null ? null : convertToKeyedMessage ( returnValue ) ;
|
public class SleepingMillisIdleStrategy { /** * { @ inheritDoc } */
public void idle ( final int workCount ) { } }
|
if ( workCount > 0 ) { return ; } try { Thread . sleep ( sleepPeriodMs ) ; } catch ( final InterruptedException ignore ) { Thread . currentThread ( ) . interrupt ( ) ; }
|
public class AntFileSetSourceAnalyzer { private void processFileSet ( FileSet fileSet , RuleSet ruleSet , ExecutorService pool ) { } }
|
DirectoryScanner dirScanner = fileSet . getDirectoryScanner ( project ) ; File baseDir = fileSet . getDir ( project ) ; String [ ] includedFiles = dirScanner . getIncludedFiles ( ) ; if ( includedFiles == null || includedFiles . length == 0 ) { LOG . info ( "No matching files found for FileSet with basedir [" + baseDir + "]" ) ; return ; } for ( String filePath : includedFiles ) { Runnable task = buildTask ( baseDir , filePath , ruleSet ) ; pool . submit ( task ) ; }
|
public class VMath { /** * Get a submatrix .
* @ param m1 Input matrix
* @ param r0 Initial row index
* @ param r1 Final row index ( exclusive )
* @ param c Array of column indices .
* @ return m1 ( r0 : r1-1 , c ( : ) ) */
public static double [ ] [ ] getMatrix ( final double [ ] [ ] m1 , final int r0 , final int r1 , final int [ ] c ) { } }
|
assert r0 <= r1 : ERR_INVALID_RANGE ; assert r1 <= m1 . length : ERR_MATRIX_DIMENSIONS ; final int rowdim = r1 - r0 , coldim = c . length ; final double [ ] [ ] X = new double [ rowdim ] [ coldim ] ; for ( int i = r0 ; i < r1 ; i ++ ) { final double [ ] row = m1 [ i ] ; final double [ ] Xi = X [ i - r0 ] ; for ( int j = 0 ; j < coldim ; j ++ ) { Xi [ j ] = row [ c [ j ] ] ; } } return X ;
|
public class INodeFileUnderConstruction { /** * Add this INodeFileUnderConstruction to the list of datanodes . */
private void addINodeToDatanodeDescriptors ( DatanodeDescriptor [ ] targets ) { } }
|
if ( targets != null ) { for ( DatanodeDescriptor node : targets ) { node . addINode ( this ) ; } }
|
public class PTBConstituent { /** * getter for gramRole - gets Grammatical role , O
* @ generated
* @ return value of the feature */
public String getGramRole ( ) { } }
|
if ( PTBConstituent_Type . featOkTst && ( ( PTBConstituent_Type ) jcasType ) . casFeat_gramRole == null ) jcasType . jcas . throwFeatMissing ( "gramRole" , "de.julielab.jules.types.PTBConstituent" ) ; return jcasType . ll_cas . ll_getStringValue ( addr , ( ( PTBConstituent_Type ) jcasType ) . casFeatCode_gramRole ) ;
|
public class CreatePresetRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( CreatePresetRequest createPresetRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( createPresetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createPresetRequest . getCategory ( ) , CATEGORY_BINDING ) ; protocolMarshaller . marshall ( createPresetRequest . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( createPresetRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( createPresetRequest . getSettings ( ) , SETTINGS_BINDING ) ; protocolMarshaller . marshall ( createPresetRequest . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class JpaStorage { /** * Returns a list of all contracts for the given client version .
 * @ param organizationId the client ' s organization id
 * @ param clientId the client id
 * @ param version the client version
 * @ return contract summaries ordered by organization and client id
 * @ throws StorageException if the query fails */
protected List < ContractSummaryBean > getClientContractsInternal ( String organizationId , String clientId , String version ) throws StorageException { } }
|
// Query all contracts whose client version matches the requested
// organization / client / version triple .
List < ContractSummaryBean > rval = new ArrayList < > ( ) ; EntityManager entityManager = getActiveEntityManager ( ) ; String jpql = "SELECT c from ContractBean c " + " JOIN c.client clientv " + " JOIN clientv.client client " + " JOIN client.organization aorg" + " WHERE client.id = :clientId " + " AND aorg.id = :orgId " + " AND clientv.version = :version " + " ORDER BY aorg.id, client.id ASC" ; Query query = entityManager . createQuery ( jpql ) ; query . setParameter ( "orgId" , organizationId ) ; // $ NON - NLS - 1 $
query . setParameter ( "clientId" , clientId ) ; // $ NON - NLS - 1 $
query . setParameter ( "version" , version ) ; // $ NON - NLS - 1 $
// Flatten each ContractBean into a ContractSummaryBean , resolving the
// client and API organizations to obtain their display names .
List < ContractBean > contracts = query . getResultList ( ) ; for ( ContractBean contractBean : contracts ) { ClientBean client = contractBean . getClient ( ) . getClient ( ) ; ApiBean api = contractBean . getApi ( ) . getApi ( ) ; PlanBean plan = contractBean . getPlan ( ) . getPlan ( ) ; OrganizationBean clientOrg = entityManager . find ( OrganizationBean . class , client . getOrganization ( ) . getId ( ) ) ; OrganizationBean apiOrg = entityManager . find ( OrganizationBean . class , api . getOrganization ( ) . getId ( ) ) ; ContractSummaryBean csb = new ContractSummaryBean ( ) ; csb . setClientId ( client . getId ( ) ) ; csb . setClientOrganizationId ( client . getOrganization ( ) . getId ( ) ) ; csb . setClientOrganizationName ( clientOrg . getName ( ) ) ; csb . setClientName ( client . getName ( ) ) ; csb . setClientVersion ( contractBean . getClient ( ) . getVersion ( ) ) ; csb . setContractId ( contractBean . getId ( ) ) ; csb . setCreatedOn ( contractBean . getCreatedOn ( ) ) ; csb . setPlanId ( plan . getId ( ) ) ; csb . setPlanName ( plan . getName ( ) ) ; csb . setPlanVersion ( contractBean . getPlan ( ) . getVersion ( ) ) ; csb . setApiDescription ( api . getDescription ( ) ) ; csb . setApiId ( api . getId ( ) ) ; csb . setApiName ( api . getName ( ) ) ; csb . setApiOrganizationId ( apiOrg . getId ( ) ) ; csb . setApiOrganizationName ( apiOrg . getName ( ) ) ; csb . setApiVersion ( contractBean . getApi ( ) . getVersion ( ) ) ; rval . add ( csb ) ; } return rval ;
|
public class Log { /** * Low - level logging call .
 * @ param priority The priority / type of this log message
 * @ param tag Used to identify the source of a log message . It usually identifies
 * the class or activity where the log call occurs .
 * @ param msg The message you would like logged .
 * @ return The number of bytes written . */
public static int println ( int priority , String tag , String msg ) { } }
|
// Delegate straight to the native writer targeting the main log buffer .
return println_native ( LOG_ID_MAIN , priority , tag , msg ) ;
|
public class NumberProcessor { /** * Formats the numerical element into the buffer , honouring number system ,
 * zero digit , sign policy and min / max digit settings ; returns the number of
 * characters printed , or - 1 when the element value is unavailable so a
 * following or - alternative can be tried . */
@ SuppressWarnings ( "unchecked" ) @ Override public int print ( ChronoDisplay formattable , Appendable buffer , AttributeQuery attributes , Set < ElementPosition > positions , // optional
boolean quickPath ) throws IOException { } }
|
// start tracks the buffer position ( when obtainable ) so an ElementPosition
// can be recorded afterwards . quickPath uses the pre - resolved number
// system / zero digit ; otherwise they are looked up from the attributes .
// The fixedInt fast path handles plain non - negative ints ; the yearOfEra
// path delegates to the DualFormatElement ; the general path supports
// Integer , Long and numerical enums with padding and sign handling .
int start = ( ( buffer instanceof CharSequence ) ? ( ( CharSequence ) buffer ) . length ( ) : - 1 ) ; int printed = 0 ; NumberSystem numsys ; char zeroChar ; if ( quickPath ) { numsys = this . numberSystem ; zeroChar = this . zeroDigit ; } else { numsys = attributes . get ( Attributes . NUMBER_SYSTEM , NumberSystem . ARABIC ) ; zeroChar = ( attributes . contains ( Attributes . ZERO_DIGIT ) ? attributes . get ( Attributes . ZERO_DIGIT ) . charValue ( ) : ( numsys . isDecimal ( ) ? numsys . getDigits ( ) . charAt ( 0 ) : '0' ) ) ; } if ( quickPath && this . fixedInt ) { int v = formattable . getInt ( ( ChronoElement < Integer > ) this . element ) ; if ( v < 0 ) { if ( v == Integer . MIN_VALUE ) { return - 1 ; // we hope for or - alternative in next steps
} else { throw new IllegalArgumentException ( "Negative value not allowed according to sign policy." ) ; } } int count = length ( v ) ; if ( count > this . maxDigits ) { throw new IllegalArgumentException ( "Element " + this . element . name ( ) + " cannot be printed as the formatted value " + v + " exceeds the maximum width of " + this . maxDigits + "." ) ; } for ( int i = 0 , n = this . minDigits - count ; i < n ; i ++ ) { buffer . append ( '0' ) ; printed ++ ; } if ( count == 2 ) { appendTwoDigits ( v , buffer , '0' ) ; } else if ( count == 1 ) { buffer . append ( ( char ) ( v + 48 ) ) ; } else if ( v >= 2000 && v < 2100 ) { buffer . append ( '2' ) ; buffer . append ( '0' ) ; appendTwoDigits ( v - 2000 , buffer , '0' ) ; } else if ( v >= 1900 && v < 2000 ) { buffer . append ( '1' ) ; buffer . append ( '9' ) ; appendTwoDigits ( v - 1900 , buffer , '0' ) ; } else { buffer . append ( Integer . toString ( v ) ) ; } printed += count ; } else if ( this . yearOfEra && ( this . element instanceof DualFormatElement ) ) { DualFormatElement te = DualFormatElement . class . cast ( this . element ) ; StringBuilder sb = new StringBuilder ( ) ; te . print ( formattable , sb , attributes , numsys , zeroChar , this . minDigits , this . maxDigits ) ; buffer . append ( sb . toString ( ) ) ; printed = sb . length ( ) ; } else { char defaultZeroChar = numsys . getDigits ( ) . charAt ( 0 ) ; Class < V > type = this . element . getType ( ) ; boolean negative = false ; boolean decimal = numsys . isDecimal ( ) ; String digits = null ; int x ; int count ; if ( type == Integer . class ) { int v = formattable . getInt ( ( ChronoElement < Integer > ) this . element ) ; if ( v == Integer . MIN_VALUE ) { return - 1 ; } negative = ( v < 0 ) ; x = Math . abs ( v ) ; count = length ( x ) ; } else if ( type == Long . class ) { V value = formattable . get ( this . element ) ; long v = Long . class . cast ( value ) . longValue ( ) ; negative = ( v < 0 ) ; digits = ( ( v == Long . MIN_VALUE ) ? 
"9223372036854775808" : Long . toString ( Math . abs ( v ) ) ) ; x = Integer . MIN_VALUE ; // satisfies compiler
count = digits . length ( ) ; defaultZeroChar = '0' ; } else if ( Enum . class . isAssignableFrom ( type ) ) { int v = Integer . MIN_VALUE ; if ( this . element instanceof NumericalElement ) { v = enumToInt ( element , formattable , attributes ) ; negative = ( v < 0 ) ; } if ( v == Integer . MIN_VALUE ) { throw new IllegalArgumentException ( "Cannot print: " + this . element ) ; } x = Math . abs ( v ) ; count = length ( x ) ; } else { throw new IllegalArgumentException ( "Not formattable: " + this . element ) ; } if ( decimal ) { if ( zeroChar != defaultZeroChar ) { // rare case
int diff = zeroChar - defaultZeroChar ; if ( digits == null ) { digits = numsys . toNumeral ( x ) ; } char [ ] characters = digits . toCharArray ( ) ; for ( int i = 0 ; i < characters . length ; i ++ ) { characters [ i ] = ( char ) ( characters [ i ] + diff ) ; } digits = new String ( characters ) ; } if ( count > this . maxDigits ) { if ( digits == null ) { digits = numsys . toNumeral ( x ) ; } throw new IllegalArgumentException ( "Element " + this . element . name ( ) + " cannot be printed as the formatted value " + digits + " exceeds the maximum width of " + this . maxDigits + "." ) ; } } if ( negative ) { if ( this . signPolicy == SignPolicy . SHOW_NEVER ) { throw new IllegalArgumentException ( "Negative value not allowed according to sign policy." ) ; } else { buffer . append ( '-' ) ; printed ++ ; } } else { switch ( this . signPolicy ) { case SHOW_ALWAYS : buffer . append ( '+' ) ; printed ++ ; break ; case SHOW_WHEN_BIG_NUMBER : if ( decimal && ( count > this . minDigits ) ) { buffer . append ( '+' ) ; printed ++ ; } break ; default : // no - op
} } if ( decimal ) { for ( int i = 0 , n = this . minDigits - count ; i < n ; i ++ ) { buffer . append ( zeroChar ) ; printed ++ ; } } if ( digits == null ) { if ( decimal ) { if ( count == 2 ) { appendTwoDigits ( x , buffer , zeroChar ) ; } else if ( count == 1 ) { buffer . append ( ( char ) ( x + zeroChar ) ) ; } else if ( x >= 2000 && x < 2100 ) { buffer . append ( ( char ) ( 2 + zeroChar ) ) ; buffer . append ( zeroChar ) ; appendTwoDigits ( x - 2000 , buffer , zeroChar ) ; } else if ( x >= 1900 && x < 2000 ) { buffer . append ( ( char ) ( 1 + zeroChar ) ) ; buffer . append ( ( char ) ( 9 + zeroChar ) ) ; appendTwoDigits ( x - 1900 , buffer , zeroChar ) ; } else { buffer . append ( numsys . toNumeral ( x ) ) ; } } else { count = numsys . toNumeral ( x , buffer ) ; } } else { buffer . append ( digits ) ; count = digits . length ( ) ; } printed += count ; } if ( ( start != - 1 ) && ( printed > 0 ) && ( positions != null ) ) { positions . add ( new ElementPosition ( this . element , start , start + printed ) ) ; } return printed ;
|
public class JCublasNDArrayFactory { /** * In place shuffle of an ndarray
 * along a specified set of dimensions
 * @ param array the ndarray to shuffle
 * @ param rnd the randomness source driving the shuffle
 * @ param dimension the dimension ( s ) to do the shuffle along */
@ Override public void shuffle ( INDArray array , Random rnd , int ... dimension ) { } }
|
// Reuse the batched overload with a single - element list .
shuffle ( Collections . singletonList ( array ) , rnd , dimension ) ;
|
public class ParserString { /** * Gemeinsame Initialmethode der drei Konstruktoren , diese erhaelt den CFML Code als char [ ] und
* uebertraegt ihn , in die interen Datenhaltung .
* @ param str */
protected void init ( String str ) { } }
|
int len = str . length ( ) ; text = new char [ len ] ; lcText = new char [ len ] ; for ( int i = 0 ; i < len ; i ++ ) { char c = str . charAt ( i ) ; text [ i ] = c ; if ( c == '\n' || c == '\r' || c == '\t' ) { lcText [ i ] = ' ' ; } else lcText [ i ] = ( ( c >= 'a' && c <= 'z' ) || ( c >= '0' && c <= '9' ) ) ? c : Character . toLowerCase ( c ) ; }
|
public class JKIOUtil { /** * Start fake thread .
* @ param server the server */
public static void startFakeThread ( ServerSocket server ) { } }
|
Thread thread = new Thread ( new FakeRunnable ( server ) ) ; thread . start ( ) ;
|
public class PendingWriteQueue { /** * Removes a pending write operation and performs it via
 * { @ link ChannelHandlerContext # write ( Object , ChannelPromise ) } .
 * @ return { @ link ChannelFuture } if something was written and { @ code null }
 * if the { @ link PendingWriteQueue } is empty . */
public ChannelFuture removeAndWrite ( ) { } }
|
// Must run on the channel ' s event loop ; the queue is not thread - safe .
// Detach the head entry , recycle its node , then hand the message and its
// promise to the context for writing .
assert ctx . executor ( ) . inEventLoop ( ) ; PendingWrite write = head ; if ( write == null ) { return null ; } Object msg = write . msg ; ChannelPromise promise = write . promise ; recycle ( write , true ) ; return ctx . write ( msg , promise ) ;
|
public class FastFileCheck { /** * Check a file by verifying a small part of each stripe .
 * @ param sourceOnly when true only the source file is verified , otherwise
 * the file is checked against its parity as well
 * @ return true if every sampled block offset verifies successfully */
public static boolean checkFile ( Configuration conf , FileSystem srcFs , FileSystem parityFs , Path srcPath , Path parityPath , Codec codec , Progressable reporter , boolean sourceOnly ) throws IOException , InterruptedException { } }
|
FileStatus stat = srcFs . getFileStatus ( srcPath ) ; long blockSize = stat . getBlockSize ( ) ; long len = stat . getLen ( ) ; List < Long > offsets = new ArrayList < Long > ( ) ; // check a small part of each stripe .
// Sample one block offset per stripe ( stride = stripeLength blocks ) and
// verify each sampled offset ; fail fast on the first mismatch .
for ( int i = 0 ; i * blockSize < len ; i += codec . stripeLength ) { offsets . add ( i * blockSize ) ; } for ( long blockOffset : offsets ) { if ( sourceOnly ) { if ( ! verifySourceFile ( conf , srcFs , stat , codec , blockOffset , reporter ) ) { return false ; } } else { if ( ! verifyFile ( conf , srcFs , parityFs , stat , parityPath , codec , blockOffset , reporter ) ) { return false ; } } } return true ;
|
public class RgbaColor { /** * Creates a new RgbaColor from the specified HSL components .
* < i > Implementation based on < a
* href = " http : / / en . wikipedia . org / wiki / HSL _ and _ HSV " > wikipedia < / a >
* and < a
* href = " http : / / www . w3 . org / TR / css3 - color / # hsl - color " > w3c < / a > < / i >
* @ param H
* Hue [ 0,360)
* @ param S
* Saturation [ 0,100]
* @ param L
* Lightness [ 0,100] */
public static RgbaColor fromHsl ( float H , float S , float L ) { } }
|
// convert to [ 0-1]
H /= 360f ; S /= 100f ; L /= 100f ; float R , G , B ; if ( S == 0 ) { // grey
R = G = B = L ; } else { float m2 = L <= 0.5 ? L * ( S + 1f ) : L + S - L * S ; float m1 = 2f * L - m2 ; R = hue2rgb ( m1 , m2 , H + 1 / 3f ) ; G = hue2rgb ( m1 , m2 , H ) ; B = hue2rgb ( m1 , m2 , H - 1 / 3f ) ; } // convert [ 0-1 ] to [ 0-255]
int r = Math . round ( R * 255f ) ; int g = Math . round ( G * 255f ) ; int b = Math . round ( B * 255f ) ; return new RgbaColor ( r , g , b , 1 ) ;
|
public class Base64Decoder { /** * Returns the decoded form of the given encoded string , as bytes .
* @ param encoded the string to decode
* @ return the decoded form of the encoded string */
public static byte [ ] decodeToBytes ( String encoded ) { } }
|
try { byte [ ] bytes = encoded . getBytes ( "UTF-8" ) ; Base64Decoder in = new Base64Decoder ( new ByteArrayInputStream ( bytes ) ) ; ByteArrayOutputStream out = new ByteArrayOutputStream ( ( int ) ( bytes . length * 0.67 ) ) ; try { byte [ ] buf = new byte [ 4 * 1024 ] ; // 4K buffer
int bytesRead ; while ( ( bytesRead = in . read ( buf ) ) != - 1 ) { out . write ( buf , 0 , bytesRead ) ; } out . close ( ) ; return out . toByteArray ( ) ; } catch ( IOException ignored ) { return null ; } } catch ( UnsupportedEncodingException ignored ) { return null ; }
|
public class PartitionBalance { /** * Dumps the partition IDs per node in terms of zone n - ary type .
 * @ param storeRoutingPlan the routing plan whose cluster layout is dumped
 * @ return pretty printed string of detailed zone n - ary type . */
private String dumpZoneNAryDetails ( StoreRoutingPlan storeRoutingPlan ) { } }
|
// For every node , bucket its zone n - ary partition ids by replica type ,
// then print each bucket in ascending replica - type order .
StringBuilder sb = new StringBuilder ( ) ; sb . append ( "\tDetailed Dump (Zone N-Aries):" ) . append ( Utils . NEWLINE ) ; for ( Node node : storeRoutingPlan . getCluster ( ) . getNodes ( ) ) { int zoneId = node . getZoneId ( ) ; int nodeId = node . getId ( ) ; sb . append ( "\tNode ID: " + nodeId + " in zone " + zoneId ) . append ( Utils . NEWLINE ) ; List < Integer > naries = storeRoutingPlan . getZoneNAryPartitionIds ( nodeId ) ; Map < Integer , List < Integer > > zoneNaryTypeToPartitionIds = new HashMap < Integer , List < Integer > > ( ) ; for ( int nary : naries ) { int zoneReplicaType = storeRoutingPlan . getZoneNaryForNodesPartition ( zoneId , nodeId , nary ) ; if ( ! zoneNaryTypeToPartitionIds . containsKey ( zoneReplicaType ) ) { zoneNaryTypeToPartitionIds . put ( zoneReplicaType , new ArrayList < Integer > ( ) ) ; } zoneNaryTypeToPartitionIds . get ( zoneReplicaType ) . add ( nary ) ; } for ( int replicaType : new TreeSet < Integer > ( zoneNaryTypeToPartitionIds . keySet ( ) ) ) { sb . append ( "\t\t" + replicaType + " : " ) ; sb . append ( zoneNaryTypeToPartitionIds . get ( replicaType ) . toString ( ) ) ; sb . append ( Utils . NEWLINE ) ; } } return sb . toString ( ) ;
|
public class FloatIterator { /** * Lazy evaluation .
* @ param iteratorSupplier
* @ return */
public static FloatIterator of ( final Supplier < ? extends FloatIterator > iteratorSupplier ) { } }
|
N . checkArgNotNull ( iteratorSupplier , "iteratorSupplier" ) ; return new FloatIterator ( ) { private FloatIterator iter = null ; private boolean isInitialized = false ; @ Override public boolean hasNext ( ) { if ( isInitialized == false ) { init ( ) ; } return iter . hasNext ( ) ; } @ Override public float nextFloat ( ) { if ( isInitialized == false ) { init ( ) ; } return iter . nextFloat ( ) ; } private void init ( ) { if ( isInitialized == false ) { isInitialized = true ; iter = iteratorSupplier . get ( ) ; } } } ;
|
public class TagChecker { /** * { @ inheritDoc } */
@ Override public Collection < Node > check ( final Collection < Node > nodes ) { } }
|
Assert . notNull ( nodes , "nodes is null!" ) ; this . nodes = nodes ; result = new LinkedHashSet < Node > ( ) ; switch ( selector . getCombinator ( ) ) { case DESCENDANT : addDescendantElements ( ) ; break ; case CHILD : addChildElements ( ) ; break ; case ADJACENT_SIBLING : addAdjacentSiblingElements ( ) ; break ; case GENERAL_SIBLING : addGeneralSiblingElements ( ) ; break ; } return result ;
|
public class Humanize { /** * Same as { @ link # lossyEquals ( String , String ) } for the specified locale .
 * @ param locale
 * The target locale
 * @ param source
 * The source string to be compared
 * @ param target
 * The target string to be compared
 * @ return true if the two strings are equals according to primary
 * differences only , false otherwise */
public static boolean lossyEquals ( final Locale locale , final String source , final String target ) { } }
|
// Run the two - argument comparison with the given locale temporarily active .
return withinLocale ( new Callable < Boolean > ( ) { @ Override public Boolean call ( ) throws Exception { return lossyEquals ( source , target ) ; } } , locale ) ;
|
public class ConfigReader { /** * Load config from the given YAML stream
* @ param inputStream the name of YAML stream to read
* @ return Map , contains the key value pairs of config */
@ SuppressWarnings ( "unchecked" ) // yaml . load API returns raw Map
public static Map < String , Object > loadStream ( InputStream inputStream ) { } }
|
LOG . fine ( "Reading config stream" ) ; Yaml yaml = new Yaml ( ) ; Map < Object , Object > propsYaml = ( Map < Object , Object > ) yaml . load ( inputStream ) ; LOG . fine ( "Successfully read config" ) ; Map < String , Object > typedMap = new HashMap < > ( ) ; for ( Object key : propsYaml . keySet ( ) ) { typedMap . put ( key . toString ( ) , propsYaml . get ( key ) ) ; } return typedMap ;
|
public class KeenClient { /** * Get an event object from the eventStore .
* @ param handle the handle object
* @ return the event object for handle
* @ throws IOException */
private Map < String , Object > getEvent ( Object handle ) throws IOException { } }
|
// Get the event from the store .
String jsonEvent = eventStore . get ( handle ) ; // De - serialize the event from its JSON .
StringReader reader = new StringReader ( jsonEvent ) ; Map < String , Object > event = jsonHandler . readJson ( reader ) ; KeenUtils . closeQuietly ( reader ) ; return event ;
|
public class SSOTokenCredentialProvider { /** * Create an SSO token for the specified accessId and store it in the
 * subject ' s private credentials .
 * @ param subject the subject receiving the SSO token credential
 * @ param principalAccessId the access id to mint the token for
 * @ throws CredentialException if the underlying token creation fails */
private void setSsoTokenCredential ( Subject subject , String principalAccessId ) throws CredentialException { } }
|
// If the subject already carries a token , replace it with an SSO token
// derived from it ; otherwise mint a fresh SSO token from the access id .
// TokenCreationFailedException is surfaced as a CredentialException .
try { TokenManager tokenManager = tokenManagerRef . getService ( ) ; SingleSignonToken ssoToken = null ; Set < Token > tokens = subject . getPrivateCredentials ( Token . class ) ; if ( tokens . isEmpty ( ) == false ) { Token ssoLtpaToken = tokens . iterator ( ) . next ( ) ; subject . getPrivateCredentials ( ) . remove ( ssoLtpaToken ) ; ssoToken = tokenManager . createSSOToken ( ssoLtpaToken ) ; } else { Map < String , Object > tokenData = new HashMap < String , Object > ( ) ; tokenData . put ( "unique_id" , principalAccessId ) ; ssoToken = tokenManager . createSSOToken ( tokenData ) ; } subject . getPrivateCredentials ( ) . add ( ssoToken ) ; } catch ( TokenCreationFailedException e ) { throw new CredentialException ( e . getLocalizedMessage ( ) ) ; }
|
public class GVRGearCursorController { /** * Set the depth of the cursor .
 * This is the length of the ray from the origin
 * to the cursor .
 * @ param depth default cursor depth */
@ Override public void setCursorDepth ( float depth ) { } }
|
// Let the superclass record the new depth ( presumably updating
// mCursorDepth - TODO confirm ) , then stretch the ray model along Z so the
// visual ray matches the cursor distance .
super . setCursorDepth ( depth ) ; if ( mRayModel != null ) { mRayModel . getTransform ( ) . setScaleZ ( mCursorDepth ) ; }
|
public class IsEMail { /** * Checks the syntax of an email address .
 * @ param email
 * The email address to be checked .
 * @ param checkDNS
 * Whether a DNS check should be performed or not .
 * @ return True if the email address is valid .
 * @ throws DNSLookupException
 * Is thrown if an internal error in the DNS lookup appeared . */
public static boolean is_email ( String email , boolean checkDNS ) throws DNSLookupException { } }
|
// Convenience wrapper : valid iff the verbose check reports state OK .
return ( is_email_verbose ( email , checkDNS ) . getState ( ) == GeneralState . OK ) ;
|
public class ScrollBarButtonPainter { /** * DOCUMENT ME !
* @ param buttonsTogether DOCUMENT ME !
* @ param isIncrease DOCUMENT ME !
* @ return DOCUMENT ME ! */
private TwoColors getScrollBarButtonBackgroundColors ( boolean buttonsTogether , boolean isIncrease ) { } }
|
if ( state == Which . FOREGROUND_CAP ) { return scrollBarCapColors ; } else if ( isPressed ) { return isIncrease ? scrollBarButtonIncreasePressed : scrollBarButtonDecreasePressed ; } else { if ( buttonsTogether ) { return isIncrease ? scrollBarButtonIncreaseTogether : scrollBarButtonDecreaseTogether ; } else { return isIncrease ? scrollBarButtonIncreaseApart : scrollBarButtonDecreaseApart ; } }
|
public class CmsSitemapDNDController { /** * Hides the content of list items by setting a specific css class . < p >
* @ param element the list item element */
private void hideItemContent ( Element element ) { } }
|
List < Element > itemWidget = CmsDomUtil . getElementsByClass ( org . opencms . gwt . client . ui . css . I_CmsLayoutBundle . INSTANCE . listItemWidgetCss ( ) . itemContainer ( ) , element ) ; if ( ( itemWidget != null ) && ( itemWidget . size ( ) > 0 ) ) { itemWidget . get ( 0 ) . addClassName ( I_CmsSitemapLayoutBundle . INSTANCE . sitemapItemCss ( ) . contentHide ( ) ) ; }
|
public class JsonErrorResponseHandler { /** * Create an AmazonServiceException using the chain of unmarshallers . This method will never
* return null , it will always return a valid AmazonServiceException
* @ param errorCode
* Error code to find an appropriate unmarshaller
* @ param jsonContent
* JsonContent of HTTP response
* @ return AmazonServiceException */
private AmazonServiceException createException ( String errorCode , JsonContent jsonContent ) { } }
|
AmazonServiceException ase = unmarshallException ( errorCode , jsonContent ) ; if ( ase == null ) { ase = new AmazonServiceException ( "Unable to unmarshall exception response with the unmarshallers provided" ) ; } return ase ;
|
public class AbstractAzkabanServlet { /** * Stores the given value in the request ' s session under the given key ,
 * creating the session on demand if it does not yet exist . */
protected void setSessionValue ( final HttpServletRequest request , final String key , final Object value ) { } }
|
// getSession ( true ) creates a session if none exists before storing .
request . getSession ( true ) . setAttribute ( key , value ) ;
|
public class SetIdentityPoolConfigurationRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( SetIdentityPoolConfigurationRequest setIdentityPoolConfigurationRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( setIdentityPoolConfigurationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( setIdentityPoolConfigurationRequest . getIdentityPoolId ( ) , IDENTITYPOOLID_BINDING ) ; protocolMarshaller . marshall ( setIdentityPoolConfigurationRequest . getPushSync ( ) , PUSHSYNC_BINDING ) ; protocolMarshaller . marshall ( setIdentityPoolConfigurationRequest . getCognitoStreams ( ) , COGNITOSTREAMS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class DescribeTransitGatewayVpcAttachmentsRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < DescribeTransitGatewayVpcAttachmentsRequest > getDryRunRequest ( ) { } }
|
Request < DescribeTransitGatewayVpcAttachmentsRequest > request = new DescribeTransitGatewayVpcAttachmentsRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
|
public class OptionGroup { /** * Indicates if the group has an option with the given value , which is also enabled .
* This group must also be enabled .
* @ param value Option value to check
* @ return True if the option for the value exists , it ' s enabled and this group is enabled */
public boolean hasValueEnabled ( String value ) { } }
|
return enabled && options . containsKey ( value ) && options . get ( value ) . isEnabled ( ) ;
|
public class MemcachedBackupSessionManager { /** * { @ inheritDoc }
 * Initializes the manager on first start , fires the START lifecycle event ,
 * warms up the session - id random number generator and finally delegates
 * to { @ code startInternal } . A second call while started is a no - op . */
@ Override public void start ( ) throws LifecycleException { } }
|
if ( ! initialized ) { init ( ) ; } // Validate and update our current component state
if ( _started ) { return ; } _lifecycle . fireLifecycleEvent ( START_EVENT , null ) ; _started = true ; // Force initialization of the random number generator
if ( log . isDebugEnabled ( ) ) { log . debug ( "Force random number initialization starting" ) ; } super . generateSessionId ( ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Force random number initialization completed" ) ; } startInternal ( null ) ;
|
public class DeploymentRequestServlet { /** * Handles GET requests under / deployment : validates optional JSONP ,
 * authenticates the caller , requires ADMIN permission , then routes on the
 * path suffix ( / download , / users , / export / types , or the deployment
 * itself ) . Errors are reported as JSON client responses . */
@ Override public void doGet ( HttpServletRequest request , HttpServletResponse response ) throws IOException , ServletException { } }
|
super . doGet ( request , response ) ; // jsonp is specified when response is expected to go to javascript function .
String jsonp = request . getParameter ( HTTPClientInterface . JSONP ) ; AuthenticationResult authResult = null ; String target = request . getPathInfo ( ) ; try { response . setContentType ( JSON_CONTENT_TYPE ) ; if ( ! HTTPClientInterface . validateJSONP ( jsonp , ( Request ) request , response ) ) { return ; } response . setStatus ( HttpServletResponse . SC_OK ) ; // Requests require authentication .
authResult = authenticate ( request ) ; if ( ! authResult . isAuthenticated ( ) ) { response . getWriter ( ) . print ( buildClientResponse ( jsonp , ClientResponse . UNEXPECTED_FAILURE , authResult . m_message ) ) ; response . setStatus ( HttpServletResponse . SC_UNAUTHORIZED ) ; return ; } // Authenticated but has no permissions .
if ( ! authResult . m_authUser . hasPermission ( Permission . ADMIN ) ) { response . getWriter ( ) . print ( buildClientResponse ( jsonp , ClientResponse . UNEXPECTED_FAILURE , "Permission denied" ) ) ; response . setStatus ( HttpServletResponse . SC_UNAUTHORIZED ) ; return ; } if ( target != null && ! target . endsWith ( "/" ) ) { // the URI may or may not end with /
target += "/" ; } if ( target == null ) { target = "/" ; // Default .
} // Authenticated and has ADMIN permission
if ( target . equals ( "/download/" ) ) { // Deployment xml is text / xml
response . setContentType ( "text/xml;charset=utf-8" ) ; DeploymentType dt = CatalogUtil . shallowClusterAndPathsClone ( this . getDeployment ( ) ) ; // reflect the actual number of cluster members
dt . getCluster ( ) . setHostcount ( getCatalogContext ( ) . getClusterSettings ( ) . hostcount ( ) ) ; response . getWriter ( ) . write ( CatalogUtil . getDeployment ( dt , true ) ) ; } else if ( target . startsWith ( "/users/" ) ) { // username may be passed in after the / ( not as a param )
if ( request . getMethod ( ) . equalsIgnoreCase ( "POST" ) ) { handleUpdateUser ( jsonp , target , request , response , authResult ) ; } else if ( request . getMethod ( ) . equalsIgnoreCase ( "PUT" ) ) { handleCreateUser ( jsonp , target , request , response , authResult ) ; } else if ( request . getMethod ( ) . equalsIgnoreCase ( "DELETE" ) ) { handleRemoveUser ( jsonp , target , request , response , authResult ) ; } else { handleGetUsers ( jsonp , target , request , response ) ; } } else if ( target . equals ( "/export/types/" ) ) { handleGetExportTypes ( jsonp , response ) ; } else if ( target . equals ( "/" ) ) { // just deployment
if ( request . getMethod ( ) . equalsIgnoreCase ( "POST" ) ) { handleUpdateDeployment ( jsonp , request , response , authResult ) ; } else { // non POST
response . setCharacterEncoding ( "UTF-8" ) ; if ( jsonp != null ) { response . getWriter ( ) . write ( jsonp + "(" ) ; } DeploymentType dt = getDeployment ( ) ; // reflect the actual number of cluster members
dt . getCluster ( ) . setHostcount ( getCatalogContext ( ) . getClusterSettings ( ) . hostcount ( ) ) ; m_mapper . writeValue ( response . getWriter ( ) , dt ) ; if ( jsonp != null ) { response . getWriter ( ) . write ( ")" ) ; } } } else { response . getWriter ( ) . print ( buildClientResponse ( jsonp , ClientResponse . UNEXPECTED_FAILURE , "Resource not found" ) ) ; response . setStatus ( HttpServletResponse . SC_NOT_FOUND ) ; } } catch ( Exception ex ) { m_log . info ( "Not servicing url: " + target + " Details: " + ex . getMessage ( ) , ex ) ; }
|
public class ArtifactHandler { /** * Return an artifact regarding its gavc
* @ param gavc String
* @ return DbArtifact */
public DbArtifact getArtifact ( final String gavc ) { } }
|
final DbArtifact artifact = repositoryHandler . getArtifact ( gavc ) ; if ( artifact == null ) { throw new WebApplicationException ( Response . status ( Response . Status . NOT_FOUND ) . entity ( "Artifact " + gavc + " does not exist." ) . build ( ) ) ; } return artifact ;
|
public class TouchImageView { /** * Return a Rect representing the zoomed image .
* @ return rect representing zoomed image */
public RectF getZoomedRect ( ) { } }
|
if ( mScaleType == ScaleType . FIT_XY ) { throw new UnsupportedOperationException ( "getZoomedRect() not supported with FIT_XY" ) ; } PointF topLeft = transformCoordTouchToBitmap ( 0 , 0 , true ) ; PointF bottomRight = transformCoordTouchToBitmap ( viewWidth , viewHeight , true ) ; float w = getDrawable ( ) . getIntrinsicWidth ( ) ; float h = getDrawable ( ) . getIntrinsicHeight ( ) ; return new RectF ( topLeft . x / w , topLeft . y / h , bottomRight . x / w , bottomRight . y / h ) ;
|
public class AmazonCodeDeployClient { /** * Deregisters an on - premises instance .
* @ param deregisterOnPremisesInstanceRequest
* Represents the input of a DeregisterOnPremisesInstance operation .
* @ return Result of the DeregisterOnPremisesInstance operation returned by the service .
* @ throws InstanceNameRequiredException
* An on - premises instance name was not specified .
* @ throws InvalidInstanceNameException
* The on - premises instance name was specified in an invalid format .
* @ sample AmazonCodeDeploy . DeregisterOnPremisesInstance
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / codedeploy - 2014-10-06 / DeregisterOnPremisesInstance "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DeregisterOnPremisesInstanceResult deregisterOnPremisesInstance ( DeregisterOnPremisesInstanceRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeDeregisterOnPremisesInstance ( request ) ;
|
public class Event { /** * Returns the current EventType from EventType , Event or Reservation keys
 * by walking up to the key ' s EVENTTYPE ancestor .
 * @ param key an entity key expected to have an EVENTTYPE ancestor
 * @ return the matching EventType constant */
public static EventType getEventType ( Key key ) { } }
|
// Maps the ancestor ' s numeric id to an enum ordinal via id - 1 — assumes
// the enum declaration order matches the stored ids ; TODO confirm .
// NOTE(review): findAncestor may return null for keys without an EVENTTYPE
// ancestor , which would NPE here — verify callers always pass valid keys .
Key ek = findAncestor ( Repository . EVENTTYPE , key ) ; return EventType . values ( ) [ ( int ) ek . getId ( ) - 1 ] ;
|
public class StatefulBeanO { /** * Returns true if the specific lifecycle callback should run with a global
* transaction .
* @ param methodId a method id from { @ link LifecycleInterceptorWrapper } */
private boolean isLifecycleCallbackGlobalTx ( int methodId ) { } }
|
EJBMethodInfoImpl [ ] methodInfos = home . beanMetaData . lifecycleInterceptorMethodInfos ; return methodInfos != null && methodInfos [ methodId ] . getTransactionAttribute ( ) == TransactionAttribute . TX_REQUIRES_NEW ;
|
public class ProjectJson { /** * Returns the system input definition URL represented by the given JSON value . */
private static URI asSystemInputRef ( JsonValue json ) { } }
|
try { URI uri = null ; if ( json != null && json . getValueType ( ) == STRING ) { uri = new URI ( ( ( JsonString ) json ) . getChars ( ) . toString ( ) ) ; } return uri ; } catch ( Exception e ) { throw new ProjectException ( "Error reading input definition" , e ) ; }
|
public class PlaceController { /** * Called before a request is submitted to the server to leave the current place . As such ,
* this method may be called multiple times before { @ link # didLeavePlace } is finally called .
* The request to leave may be rejected , but if a place controller needs to flush any
* information to the place manager before it leaves , it should so do here . This is the only
* place in which the controller is guaranteed to be able to communicate to the place manager ,
* as by the time { @ link # didLeavePlace } is called , the place manager may have already been
* destroyed . */
public void mayLeavePlace ( final PlaceObject plobj ) { } }
|
// let our delegates know what ' s up
applyToDelegates ( new DelegateOp ( PlaceControllerDelegate . class ) { @ Override public void apply ( PlaceControllerDelegate delegate ) { delegate . mayLeavePlace ( plobj ) ; } } ) ;
|
public class ADictionary { /** * 1 , synonyms words to synonyms entry
* 2 , loop each synonyms word and set the IWord # synEntry
* 3 , clear the synonyms buffer */
public void resetSynonymsNet ( ) { } }
|
synchronized ( synBuffer ) { if ( synBuffer . size ( ) == 0 ) { return ; } Iterator < String [ ] > it = synBuffer . iterator ( ) ; while ( it . hasNext ( ) ) { String [ ] synLine = it . next ( ) ; // / if ( synLine [ 0 ] . length ( ) > config . MAX _ LENGTH ) {
// / continue ;
// check if the baseWord is exists or not
IWord baseWord = get ( ILexicon . CJK_WORD , synLine [ 0 ] ) ; if ( baseWord == null ) { continue ; } /* * first get the synonyms entry from the root map
* create a new one and map it with the root word if it not exists */
SynonymsEntry synEntry = rootMap . get ( baseWord . getValue ( ) ) ; if ( synEntry == null ) { synEntry = new SynonymsEntry ( baseWord ) ; rootMap . put ( baseWord . getValue ( ) , synEntry ) ; synEntry . add ( baseWord ) ; // add the base word first
} for ( int i = 1 ; i < synLine . length ; i ++ ) { String [ ] parts = synLine [ i ] . split ( "\\s*/\\s*" ) ; // / if ( parts [ 0 ] . length ( ) > config . MAX _ LENGTH ) {
// / continue ;
// check if the word is exists or not
// or create a new one
IWord synWord = get ( ILexicon . CJK_WORD , parts [ 0 ] ) ; if ( synWord == null ) { synWord = new Word ( parts [ 0 ] , IWord . T_CJK_WORD ) ; add ( ILexicon . CJK_WORD , synWord ) ; } // check and extends the part of speech from the baseWord
if ( synWord . getPartSpeech ( ) == null ) { synWord . setPartSpeech ( baseWord . getPartSpeech ( ) ) ; } // check and extends the entity from the baseWord
if ( synWord . getEntity ( ) == null ) { synWord . setEntity ( baseWord . getEntity ( ) ) ; } // check and set the pinyin
if ( parts . length > 1 ) { synWord . setPinyin ( parts [ 1 ] ) ; } synEntry . add ( synWord ) ; } } synBuffer . clear ( ) ; }
|
public class Transport { /** * Delivers a list of a child entries .
* @ param classs
* target class . */
public < T > List < T > getChildEntries ( Class < ? > parentClass , String parentId , Class < T > classs ) throws RedmineException { } }
|
final EntityConfig < T > config = getConfig ( classs ) ; final URI uri = getURIConfigurator ( ) . getChildObjectsURI ( parentClass , parentId , classs ) ; HttpGet http = new HttpGet ( uri ) ; String response = getCommunicator ( ) . sendRequest ( http ) ; final JSONObject responseObject ; try { responseObject = RedmineJSONParser . getResponse ( response ) ; return JsonInput . getListNotNull ( responseObject , config . multiObjectName , config . parser ) ; } catch ( JSONException e ) { throw new RedmineFormatException ( "Bad categories response " + response , e ) ; }
|
public class MetadataProviderImpl { /** * Return the { @ link TypeMetadata } instance representing the given type .
* @ param type
* The type .
* @ param metadataType
* The expected metadata type .
* @ param < T >
* The metadata type .
* @ return The { @ link TypeMetadata } instance . */
private < T extends TypeMetadata > T getMetadata ( Class < ? > type , Class < T > metadataType ) { } }
|
TypeMetadata typeMetadata = metadataByType . get ( type ) ; if ( typeMetadata == null ) { throw new XOException ( "Cannot resolve metadata for type " + type . getName ( ) + "." ) ; } if ( ! metadataType . isAssignableFrom ( typeMetadata . getClass ( ) ) ) { throw new XOException ( "Expected metadata of type '" + metadataType . getName ( ) + "' but got '" + typeMetadata . getClass ( ) + "' for type '" + type + "'" ) ; } return metadataType . cast ( typeMetadata ) ;
|
public class Matrix4f { /** * / * ( non - Javadoc )
* @ see org . joml . Matrix4fc # invert ( org . joml . Matrix4f ) */
public Matrix4f invert ( Matrix4f dest ) { } }
|
if ( ( properties & PROPERTY_IDENTITY ) != 0 ) { return dest . identity ( ) ; } else if ( ( properties & PROPERTY_TRANSLATION ) != 0 ) return invertTranslation ( dest ) ; else if ( ( properties & PROPERTY_ORTHONORMAL ) != 0 ) return invertOrthonormal ( dest ) ; else if ( ( properties & PROPERTY_AFFINE ) != 0 ) return invertAffine ( dest ) ; else if ( ( properties & PROPERTY_PERSPECTIVE ) != 0 ) return invertPerspective ( dest ) ; return invertGeneric ( dest ) ;
|
public class GlobalNamespace { /** * True if the given Node is the GETPROP in a statement like ` some . q . name ; `
* < p > Such do - nothing statements often have JSDoc on them and are intended to declare the
* qualified name .
* @ param node any Node , or even null */
private static boolean isQnameDeclarationWithoutAssignment ( @ Nullable Node node ) { } }
|
return node != null && node . isGetProp ( ) && node . getParent ( ) . isExprResult ( ) ;
|
public class ClientNotificationArea { /** * This method will be called by the NotificationListener once the MBeanServer pushes a notification . */
public void addNotfication ( Notification notification ) { } }
|
Object source = notification . getSource ( ) ; NotificationRecord nr ; if ( source instanceof ObjectName ) { nr = new NotificationRecord ( notification , ( ObjectName ) source ) ; } else { nr = new NotificationRecord ( notification , ( source != null ) ? source . toString ( ) : null ) ; } addNotficationRecord ( nr ) ;
|
public class TaskDeploymentDescriptor { /** * Return the sub task ' s serialized job information .
* @ return serialized job information ( may be < tt > null < / tt > before a call to { @ link
* # loadBigData ( PermanentBlobService ) } ) . */
@ Nullable public SerializedValue < JobInformation > getSerializedJobInformation ( ) { } }
|
if ( serializedJobInformation instanceof NonOffloaded ) { NonOffloaded < JobInformation > jobInformation = ( NonOffloaded < JobInformation > ) serializedJobInformation ; return jobInformation . serializedValue ; } else { throw new IllegalStateException ( "Trying to work with offloaded serialized job information." ) ; }
|
public class ArrayUtil { /** * 移除数组中指定的元素 < br >
* 只会移除匹配到的第一个元素 copy from commons - lang
* @ param < T > 数组元素类型
* @ param array 数组对象 , 可以是对象数组 , 也可以原始类型数组
* @ param element 要移除的元素
* @ return 去掉指定元素后的新数组或原数组
* @ throws IllegalArgumentException 参数对象不为数组对象
* @ since 3.0.8 */
public static < T > T [ ] removeEle ( T [ ] array , T element ) throws IllegalArgumentException { } }
|
return remove ( array , indexOf ( array , element ) ) ;
|
public class BackupClientImpl { /** * { @ inheritDoc } */
public String stop ( String backupId ) throws IOException , BackupExecuteException { } }
|
// first try to find current repository backup
String sURL = path + HTTPBackupAgent . Constants . BASE_URL + HTTPBackupAgent . Constants . OperationType . CURRENT_BACKUPS_REPOSITORY_INFO ; BackupAgentResponse repositoryResponse = transport . executeGET ( sURL ) ; if ( repositoryResponse . getStatus ( ) == Response . Status . OK . getStatusCode ( ) ) { ShortInfoList repositoryInfoList ; try { repositoryInfoList = ( ShortInfoList ) getObject ( ShortInfoList . class , repositoryResponse . getResponseData ( ) ) ; } catch ( Exception e ) { throw new IllegalStateException ( "Can not get ShortInfoList from responce." , e ) ; } for ( ShortInfo info : repositoryInfoList . getBackups ( ) ) { if ( info . getBackupId ( ) . equals ( backupId ) ) { // repository backup
sURL = path + HTTPBackupAgent . Constants . BASE_URL + HTTPBackupAgent . Constants . OperationType . STOP_BACKUP_REPOSITORY + "/" + backupId ; BackupAgentResponse response = transport . executeGET ( sURL ) ; if ( response . getStatus ( ) == Response . Status . OK . getStatusCode ( ) ) { return "\nSuccessful : \n" + "\tstatus code = " + response . getStatus ( ) + "\n" ; } else { return failureProcessing ( response ) ; } } } } // then try to find current workspace backup
sURL = path + HTTPBackupAgent . Constants . BASE_URL + HTTPBackupAgent . Constants . OperationType . CURRENT_BACKUPS_INFO ; BackupAgentResponse workspaceResponse = transport . executeGET ( sURL ) ; if ( workspaceResponse . getStatus ( ) == Response . Status . OK . getStatusCode ( ) ) { ShortInfoList workspaceInfoList ; try { workspaceInfoList = ( ShortInfoList ) getObject ( ShortInfoList . class , workspaceResponse . getResponseData ( ) ) ; } catch ( Exception e ) { throw new IllegalStateException ( "Can not get ShortInfoList from responce." , e ) ; } for ( ShortInfo info : workspaceInfoList . getBackups ( ) ) { if ( info . getBackupId ( ) . equals ( backupId ) ) { // workspace backup
sURL = path + HTTPBackupAgent . Constants . BASE_URL + HTTPBackupAgent . Constants . OperationType . STOP_BACKUP + "/" + backupId ; BackupAgentResponse response = transport . executeGET ( sURL ) ; if ( response . getStatus ( ) == Response . Status . OK . getStatusCode ( ) ) { return "\nSuccessful : \n" + "\tstatus code = " + response . getStatus ( ) + "\n" ; } else { return failureProcessing ( response ) ; } } } } return "\nFailure :\n" + "\tmessage : There are no active backup with id " + backupId ;
|
public class FaxClientSpiFactory { /** * Returns the configuration to be used by the new SPI instance . < br >
* The configuration of the fax client SPI is made up of 3 layers .
* The lowest layer is the internal fax4j . properties file located in the fax4j jar .
* This configuration file contains the initial default configuration .
* The next layer is the external fax4j . properties file that is located on the classpath .
* This file is optional and provides the ability to override the internal configuration .
* The top most layer is the optional properties object provided by the external classes .
* These properties enable to override the configuration of the lower 2 layers .
* @ param configuration
* The fax client configuration ( may be null )
* @ return The fax client layered configuration */
protected static Map < String , String > getConfiguration ( Properties configuration ) { } }
|
// get system configuration
Map < String , String > systemConfig = LibraryConfigurationLoader . getSystemConfiguration ( ) ; // create new map
Map < String , String > layeredConfiguration = new HashMap < String , String > ( systemConfig ) ; if ( configuration != null ) { // convert to map
SpiUtil . copyPropertiesToMap ( configuration , layeredConfiguration ) ; } return layeredConfiguration ;
|
public class XmlUtil { /** * Escape the following characters { @ code " ' & < > } with their XML entities , e . g .
* { @ code " bread " & " butter " } becomes { @ code & quot ; bread & quot ; & amp ; & quot ; butter & quot } .
* Notes : < ul >
* < li > Supports only the five basic XML entities ( gt , lt , quot , amp , apos ) < / li >
* < li > Does not escape control characters < / li >
* < li > Does not support DTDs or external entities < / li >
* < li > Does not treat surrogate pairs specially < / li >
* < li > Does not perform Unicode validation on its input < / li >
* < / ul >
* @ param orig the original String
* @ return A new string in which all characters that require escaping
* have been replaced with the corresponding XML entities .
* @ see # escapeControlCharacters ( String ) */
public static String escapeXml ( String orig ) { } }
|
return StringGroovyMethods . collectReplacements ( orig , new Closure < String > ( null ) { public String doCall ( Character arg ) { switch ( arg ) { case '&' : return "&" ; case '<' : return "<" ; case '>' : return ">" ; case '"' : return """ ; case '\'' : return "'" ; } return null ; } } ) ;
|
public class MZMLMultiSpectraParser { /** * Given a scan ID goes to the index and tries to find a mapping .
* @ throws umich . ms . fileio . exceptions . FileParsingException in case the mapping can ' t be done */
protected int mapIdRefToInternalScanNum ( CharArray id ) throws FileParsingException { } }
|
String idStr = id . toString ( ) ; MZMLIndexElement byId = index . getById ( idStr ) ; if ( byId == null ) { String msg = String . format ( "Could not find a mapping from spectrum id" + " ref to an internal scan number for" + "\n\t file: %s" + "\n\t spectrum index of the spectrum in which the error occured: #%d" + "\n\t idRef searched for: %s" , source . getPath ( ) , vars . spectrumIndex , idStr ) ; throw new FileParsingException ( msg ) ; } return byId . getNumber ( ) ;
|
public class HtmlOutcomeTargetLink { /** * < p > Return the value of the < code > shape < / code > property . < / p >
* < p > Contents : The shape of the hot spot on the screen
* ( for use in client - side image maps ) . Valid
* values are : default ( entire region ) ; rect
* ( rectangular region ) ; circle ( circular region ) ;
* and poly ( polygonal region ) . */
public java . lang . String getShape ( ) { } }
|
return ( java . lang . String ) getStateHelper ( ) . eval ( PropertyKeys . shape ) ;
|
public class MonitorRegistry { /** * Returns only enabled monitors . */
public List < ActivityMonitor > getActivityMonitors ( RuntimeContext context ) { } }
|
return getActivityMonitors ( ) . stream ( ) . filter ( monitor -> monitor . isEnabled ( context ) ) . collect ( Collectors . toList ( ) ) ;
|
public class AbstractCLA { /** * { @ inheritDoc } */
@ Override public ICmdLineArg < E > setEnumCriteria ( final String _enumClassName ) throws ParseException , IOException { } }
|
this . enumClassName = _enumClassName ; Class < ? > enumClass ; try { enumClass = CmdLine . ClassLoader . loadClass ( _enumClassName ) ; } catch ( final ClassNotFoundException e ) { throw new ParseException ( "Enum class not found: " + e . getMessage ( ) , 0 ) ; } final List < E > list = new ArrayList < > ( ) ; if ( ! enumClass . isEnum ( ) ) throw new ParseException ( "Enum class expected, found " + enumClass . getName ( ) , 0 ) ; final Object [ ] constants = enumClass . getEnumConstants ( ) ; for ( final Object constant : constants ) { final String econst = constant . toString ( ) ; list . add ( convert ( econst , true , null ) ) ; } setCriteria ( new EnumCriteria < > ( list ) ) ; return this ;
|
public class ConcurrentLinkedHashMap { /** * Determines whether the buffers should be drained .
* @ param delayable if a drain should be delayed until required
* @ return if a drain should be attempted */
boolean shouldDrainBuffers ( boolean delayable ) { } }
|
if ( executor . isShutdown ( ) ) { DrainStatus status = drainStatus . get ( ) ; return ( status != PROCESSING ) && ( ! delayable || ( status == REQUIRED ) ) ; } return false ;
|
public class TriangularStochasticLaw { /** * Replies the x according to the value of the distribution function .
* @ param u is a value given by the uniform random variable generator { @ code U ( 0 , 1 ) } .
* @ return { @ code F < sup > - 1 < / sup > ( u ) }
* @ throws MathException in case { @ code F < sup > - 1 < / sup > ( u ) } could not be computed */
@ Pure @ Override public double inverseF ( double u ) throws MathException { } }
|
if ( ( u < 0 ) || ( u > 1 ) ) { throw new OutsideDomainException ( u ) ; } if ( u < this . dxmode ) { return Math . sqrt ( u * this . delta1 ) + this . minX ; } return this . maxX - Math . sqrt ( ( 1 - u ) * this . delta2 ) ;
|
public class BaseSamlRegisteredServiceMetadataResolver { /** * Add metadata filters to metadata resolver .
* @ param metadataProvider the metadata provider
* @ param metadataFilterList the metadata filter list */
protected void addMetadataFiltersToMetadataResolver ( final AbstractMetadataResolver metadataProvider , final List < MetadataFilter > metadataFilterList ) { } }
|
val metadataFilterChain = new MetadataFilterChain ( ) ; metadataFilterChain . setFilters ( metadataFilterList ) ; LOGGER . debug ( "Metadata filter chain initialized with [{}] filters" , metadataFilterList . size ( ) ) ; metadataProvider . setMetadataFilter ( metadataFilterChain ) ;
|
public class Tuple { /** * Sets a value at a specific position in the tuple .
* @ param eval the enum which is used to determine the index for the set operation
* @ param val the value to set
* @ return a handle to this object to enable builder operations
* @ see # set ( Enum , Object ) for more info
* @ deprecated use { @ link # setString ( Enum , String ) } instead */
public Tuple set ( Enum < ? > eval , String val ) { } }
|
set ( eval , ( Object ) val ) ; return this ;
|
public class TokensApi { /** * Token Info
* Returns the Token Information
* @ return ApiResponse & lt ; TokenInfoSuccessResponse & gt ;
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiResponse < TokenInfoSuccessResponse > tokenInfoWithHttpInfo ( ) throws ApiException { } }
|
com . squareup . okhttp . Call call = tokenInfoValidateBeforeCall ( null , null ) ; Type localVarReturnType = new TypeToken < TokenInfoSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
|
public class JsonObject { /** * 添加全局消息
* @ param message
* 消息 */
public void addStatusInfo ( String message ) { } }
|
if ( message == null ) { return ; } statusInfo . put ( WebResponseConstant . MESSAGE_GLOBAL , message ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.