signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class DatabaseDAODefaultImpl { public void put_device_attribute_property ( Database database , String deviceName , DbAttribute [ ] attr ) throws DevFailed { } } | if ( ! database . isAccess_checked ( ) ) checkAccess ( database ) ; DeviceData argIn = new DeviceData ( ) ; try { // value is an array
argIn . insert ( ApiUtil . toStringArray ( deviceName , attr , 2 ) ) ; command_inout ( database , "DbPutDeviceAttributeProperty2" , argIn ) ; } catch ( DevFailed e ) { if ( e . errors [ 0 ] . reason . equals ( "API_CommandNotFound" ) ) { // Value is just one element
argIn . insert ( ApiUtil . toStringArray ( deviceName , attr , 1 ) ) ; command_inout ( database , "DbPutDeviceAttributeProperty" , argIn ) ; } else throw e ; } |
public class Node { /** * Useful to set length of a transpiled node tree to map back to the length of original node . */
public final void setLengthForTree ( int length ) { } } | this . length = length ; for ( Node child = first ; child != null ; child = child . next ) { child . setLengthForTree ( length ) ; } |
public class MsgSettingController { /** * Annotation scan .
* @ param maxdeeplevel the maxdeeplevel
* @ param req the req
* @ param res the res */
@ GetMapping ( "/setting/scan/{maxdeeplevel}" ) public void annotationScan ( @ PathVariable int maxdeeplevel , HttpServletRequest req , HttpServletResponse res ) { } } | this . validationSessionComponent . sessionCheck ( req ) ; this . msgSaver . annotationScan ( maxdeeplevel ) ; |
public class Sanitizers { /** * Makes sure that the given input is a data URI corresponding to an image .
* < p > SanitizedContent kind does not apply - - the directive is also used to ensure no foreign
* resources are loaded . */
public static SanitizedContent filterImageDataUri ( SoyValue value ) { } } | value = normalizeNull ( value ) ; return filterImageDataUri ( value . coerceToString ( ) ) ; |
public class BloomFilter { /** * Copies filter into this . Filter must have the same size , hash function count and nTweak or an
* IllegalArgumentException will be thrown . */
public synchronized void merge ( BloomFilter filter ) { } } | if ( ! this . matchesAll ( ) && ! filter . matchesAll ( ) ) { checkArgument ( filter . data . length == this . data . length && filter . hashFuncs == this . hashFuncs && filter . nTweak == this . nTweak ) ; for ( int i = 0 ; i < data . length ; i ++ ) this . data [ i ] |= filter . data [ i ] ; } else { this . data = new byte [ ] { ( byte ) 0xff } ; } |
public class DeploymentDescriptorParser { /** * This method is used to inspect the ra . xml and check if its a 1.0 RA . This is called only
* if we fail parsing the ra . xml using JAXB for 1.5/1.6
* @ param xmlStream The ra . xml file
* @ return whether the resource adapter is a 1.0 resource adapter */
public static boolean isVersion10ResourceAdapter ( InputStream xmlStream ) { } } | try { SAXParserFactory factory = SAXParserFactory . newInstance ( ) ; factory . setNamespaceAware ( true ) ; factory . setValidating ( false ) ; XMLReader parser = factory . newSAXParser ( ) . getXMLReader ( ) ; SAXVersionHandler handler = new SAXVersionHandler ( ) ; parser . setEntityResolver ( resolver ) ; parser . setContentHandler ( handler ) ; parser . parse ( new InputSource ( xmlStream ) ) ; return handler . isVersion10ResourceAdapter ; } catch ( SAXException ex ) { // Check for FFDC
} catch ( ParserConfigurationException e ) { // Check for FFDC
} catch ( IOException e ) { // Check for FFDC
} return false ; |
public class ChuLiuEdmonds { /** * Find an optimal arborescence of the given graph ` graph ` , rooted in the given node ` root ` . */
public static Weighted < Arborescence < Node > > getMaxArborescence ( WeightedGraph graph , Node root ) { } } | // remove all edges incoming to ` root ` . resulting arborescence is then forced to be rooted at ` root ` .
return getMaxArborescence ( graph . filterEdges ( not ( DirectedEdge . hasDestination ( root ) ) ) ) ; |
public class PrimitiveDoubleArray2dJsonDeserializer { /** * { @ inheritDoc } */
@ Override public double [ ] [ ] doDeserialize ( JsonReader reader , JsonDeserializationContext ctx , JsonDeserializerParameters params ) { } } | List < List < Double > > list = deserializeIntoList ( reader , ctx , DoubleJsonDeserializer . getInstance ( ) , params ) ; if ( list . isEmpty ( ) ) { return new double [ 0 ] [ 0 ] ; } List < Double > firstList = list . get ( 0 ) ; if ( firstList . isEmpty ( ) ) { return new double [ list . size ( ) ] [ 0 ] ; } double [ ] [ ] array = new double [ list . size ( ) ] [ firstList . size ( ) ] ; int i = 0 ; int j ; for ( List < Double > innerList : list ) { j = 0 ; for ( Double value : innerList ) { if ( null != value ) { array [ i ] [ j ] = value ; } j ++ ; } i ++ ; } return array ; |
public class xen_health_monitor_fan_speed { /** * < pre >
* Use this operation to get the IPMI sensor data ( for all fan - speed sensors ) .
* < / pre > */
public static xen_health_monitor_fan_speed [ ] get ( nitro_service client ) throws Exception { } } | xen_health_monitor_fan_speed resource = new xen_health_monitor_fan_speed ( ) ; resource . validate ( "get" ) ; return ( xen_health_monitor_fan_speed [ ] ) resource . get_resources ( client ) ; |
public class GantMetaClass { /** * Process the argument to a { @ code depends } call . If the parameter is a { @ code Closure } just
* process it . If it is a { @ code String } then do a lookup for the { @ code Closure } in the
* binding , and if found process it .
* @ param argument The argument .
* @ return The result of the { @ code Closure } . */
private Object processArgument ( final Object argument ) { } } | final Object returnObject ; if ( argument instanceof Closure ) { returnObject = processClosure ( ( Closure < ? > ) argument ) ; } else { final String errorReport = "depends called with an argument (" + argument + ") that is not a known target or list of targets." ; Object theArgument = argument ; if ( theArgument instanceof GString ) { theArgument = theArgument . toString ( ) ; } if ( theArgument instanceof String ) { final Object entry = binding . getVariable ( ( String ) theArgument ) ; if ( ( entry != null ) && ( entry instanceof Closure ) ) { returnObject = processClosure ( ( Closure < ? > ) entry ) ; } else { throw new RuntimeException ( errorReport ) ; } } else { throw new RuntimeException ( errorReport ) ; } } return returnObject ; |
public class DeploymentsInner { /** * Deletes a deployment from the deployment history .
* A template deployment that is currently running cannot be deleted . Deleting a template deployment removes the associated deployment operations . Deleting a template deployment does not affect the state of the resource group . This is an asynchronous operation that returns a status of 202 until the template deployment is successfully deleted . The Location response header contains the URI that is used to obtain the status of the process . While the process is running , a call to the URI in the Location header returns a status of 202 . When the process finishes , the URI in the Location header returns a status of 204 on success . If the asynchronous request failed , the URI in the Location header returns an error - level status code .
* @ param resourceGroupName The name of the resource group with the deployment to delete . The name is case insensitive .
* @ param deploymentName The name of the deployment to delete .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void delete ( String resourceGroupName , String deploymentName ) { } } | deleteWithServiceResponseAsync ( resourceGroupName , deploymentName ) . toBlocking ( ) . last ( ) . body ( ) ; |
public class GetExportSnapshotRecordsResult { /** * A list of objects describing the export snapshot records .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setExportSnapshotRecords ( java . util . Collection ) } or
* { @ link # withExportSnapshotRecords ( java . util . Collection ) } if you want to override the existing values .
* @ param exportSnapshotRecords
* A list of objects describing the export snapshot records .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetExportSnapshotRecordsResult withExportSnapshotRecords ( ExportSnapshotRecord ... exportSnapshotRecords ) { } } | if ( this . exportSnapshotRecords == null ) { setExportSnapshotRecords ( new java . util . ArrayList < ExportSnapshotRecord > ( exportSnapshotRecords . length ) ) ; } for ( ExportSnapshotRecord ele : exportSnapshotRecords ) { this . exportSnapshotRecords . add ( ele ) ; } return this ; |
public class SettingsActivity { /** * Google found a ' security vulnerability ' and imposed this hack .
* Have to check this fragment was actually conceived by this activity . */
@ Override protected boolean isValidFragment ( String fragmentName ) { } } | Boolean knownFrag = false ; for ( Class < ? > cls : INNER_CLASSES ) { if ( cls . getName ( ) . equals ( fragmentName ) ) { knownFrag = true ; break ; } } return knownFrag ; |
public class HashSHAConverter { /** * Encode the byte string - OVERRIDE this .
* @ param rgbValue
* @ return
* @ throws NoSuchAlgorithmException */
public byte [ ] encodeBytes ( byte [ ] rgbValue ) throws NoSuchAlgorithmException { } } | rgbValue = Base64 . encodeSHA ( rgbValue ) ; rgbValue = super . encodeBytes ( rgbValue ) ; // Base64 encoding
return rgbValue ; |
public class BoxedText { /** * Gets the value of the sec property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the sec property .
* For example , to add a new item , do as follows :
* < pre >
* getSec ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link Sec } */
public java . util . List < Sec > getSec ( ) { } } | if ( sec == null ) { sec = new ArrayList < Sec > ( ) ; } return this . sec ; |
public class DateTime { /** * Formats the given date as an XSD < tt > dateTime < / tt > lexical string , using the offset specified
* by the given timezone .
* If no timezone is specified , then the given date is treated as a local time ; it will be written
* using the offset of the system default timezone , but the timezone offset will be omitted from
* the lexical string . This is < strong > not < / strong > recommended , as the resulting < tt > dateTime < / tt >
* will be relative to some unknown timezone and could be interpreted differently by different systems .
* @ param d A date .
* @ param tz A timezone .
* @ return The corresponding lexical string . */
public static String format ( Date d , TimeZone tz ) { } } | if ( d == null ) throw new IllegalArgumentException ( "Null date value" ) ; long offset = d . getTime ( ) ; int tzOffsetMs = ( tz != null ? tz : LOCAL_TZ ) . getOffset ( d . getTime ( ) ) ; if ( offset > Long . MAX_VALUE - EPOCH_OFFSET_MS - tzOffsetMs ) throw new ArithmeticException ( "Cannot convert to ISO-8601 offset." ) ; offset += EPOCH_OFFSET_MS + tzOffsetMs ; long da = offset / MS_IN_DAY ; int millis = ( int ) ( offset - ( da * MS_IN_DAY ) ) ; // Adjust for negative offsets .
if ( millis < 0 ) { da -- ; millis += MS_IN_DAY ; } int year = ( int ) ( da * 400 / 146097 ) + 1 ; // = = da / 365.2425 + 1
int day = ( int ) ( da - elapsedDays ( year ) ) + 1 ; while ( day < 1 ) day += daysInYear ( -- year ) ; int temp ; while ( day > ( temp = daysInYear ( year ) ) ) { day -= temp ; year ++ ; } int month = FIRST_MONTH ; while ( day > ( temp = daysInMonth ( year , month ) ) ) { day -= temp ; month ++ ; } int hour = millis / MS_IN_HOUR ; millis -= hour * MS_IN_HOUR ; int min = millis / MS_IN_MIN ; millis -= min * MS_IN_MIN ; int sec = millis / MS_IN_SEC ; millis -= sec * MS_IN_SEC ; StringBuilder sb = new StringBuilder ( ) ; if ( year < 0 ) { sb . append ( '-' ) ; year = - year ; } append ( sb , year , 4 ) ; sb . append ( DATE_SEP ) ; append ( sb , month , 2 ) ; sb . append ( DATE_SEP ) ; append ( sb , day , 2 ) ; sb . append ( DATE_TIME_SEP ) ; append ( sb , hour , 2 ) ; sb . append ( TIME_SEP ) ; append ( sb , min , 2 ) ; sb . append ( TIME_SEP ) ; append ( sb , sec , 2 ) ; if ( millis > 0 ) { sb . append ( '.' ) ; for ( int i = 2 ; i >= 0 && millis > 0 ; i -- ) { sb . append ( millis / TENS [ i ] ) ; millis = millis % TENS [ i ] ; } } if ( tz != null ) { if ( tzOffsetMs == 0 ) { sb . append ( 'Z' ) ; } else { int tzOffset = tzOffsetMs / MS_IN_MIN ; // XSD specifies minutes .
sb . append ( tzOffset < 0 ? '-' : '+' ) ; tzOffset = Math . abs ( tzOffset ) ; append ( sb , tzOffset / MINS_IN_HOUR , 2 ) ; sb . append ( TIME_SEP ) ; append ( sb , tzOffset % MINS_IN_HOUR , 2 ) ; } } return sb . toString ( ) ; |
public class NavigationController { /** * Calculate the target position should there be a rescale point . The idea is that after zooming in or out , the
* mouse cursor would still lie at the same position in world space . */
protected Coordinate calculatePosition ( boolean zoomIn , Coordinate rescalePoint ) { } } | ViewPort viewPort = mapPresenter . getViewPort ( ) ; Coordinate position = viewPort . getPosition ( ) ; int index = viewPort . getResolutionIndex ( viewPort . getResolution ( ) ) ; double resolution = viewPort . getResolution ( ) ; if ( zoomIn && index < viewPort . getResolutionCount ( ) - 1 ) { resolution = viewPort . getResolution ( index + 1 ) ; } else if ( ! zoomIn && index > 0 ) { resolution = viewPort . getResolution ( index - 1 ) ; } double factor = viewPort . getResolution ( ) / resolution ; double dX = ( rescalePoint . getX ( ) - position . getX ( ) ) * ( 1 - 1 / factor ) ; double dY = ( rescalePoint . getY ( ) - position . getY ( ) ) * ( 1 - 1 / factor ) ; return new Coordinate ( position . getX ( ) + dX , position . getY ( ) + dY ) ; |
public class HostName { /** * Provides a normalized string which is lowercase for host strings , and which is a normalized string for addresses .
* @ return */
@ Override public String toNormalizedString ( ) { } } | String result = normalizedString ; if ( result == null ) { normalizedString = result = toNormalizedString ( false ) ; } return result ; |
public class CalendarHashChainRegistrationTimeRule { /** * Calculates the time when the signature was registered in the KSI hash calendar . */
private Date calculateRegistrationTime ( CalendarHashChain calendarHashChain ) throws InvalidCalendarHashChainException { } } | List < CalendarHashChainLink > chain = calendarHashChain . getChainLinks ( ) ; long r = calendarHashChain . getPublicationTime ( ) . getTime ( ) / 1000 ; // publication time in seconds
long t = 0 ; // iterate over the chain in reverse
ListIterator < CalendarHashChainLink > li = chain . listIterator ( chain . size ( ) ) ; while ( li . hasPrevious ( ) ) { if ( r <= 0 ) { LOGGER . warn ( "Calendar hash chain shape is inconsistent with publication time" ) ; r = 0 ; return new Date ( 0 ) ; } CalendarHashChainLink link = li . previous ( ) ; if ( ! link . isRightLink ( ) ) { r = highBit ( r ) - 1 ; } else { t = t + highBit ( r ) ; r = r - highBit ( r ) ; } } if ( r != 0 ) { LOGGER . warn ( "Calendar hash chain shape inconsistent with publication time" ) ; t = 0 ; } return new Date ( t * 1000 ) ; |
public class Connection { /** * @ see org . eclipse . datatools . connectivity . oda . IConnection # open ( java . util . Properties ) */
@ SuppressWarnings ( "resource" ) public void open ( Properties connProperties ) throws OdaException { } } | String url = connProperties . getProperty ( DB_URI_PROPERTY ) ; // " remote : 127.0.0.1 / Orienteer " ;
String username = userData != null ? userData . getUserName ( ) : connProperties . getProperty ( DB_USER_PROPERTY ) ; // " admin " ;
String password = userData != null ? userData . getPassword ( ) : connProperties . getProperty ( DB_PASSWORD_PROPERTY ) ; // " admin " ;
try { db = new ODatabaseDocumentTx ( url ) . open ( username , password ) ; m_isOpen = true ; } catch ( Exception e ) { throw new OdaException ( e ) ; } |
public class StreamTransport { /** * { @ inheritDoc } */
@ Override public void error ( Throwable t ) { } } | logger . warn ( "" , t ) ; connectFutureException ( t ) ; TransportsUtil . invokeFunction ( ERROR , decoders , functions , t . getClass ( ) , t , ERROR . name ( ) , resolver ) ; |
public class DBSetup { /** * Execute arbitrary SQL code .
* @ param callInfo Call info .
* @ param db Database
* @ param sql SQL statement .
* @ param args Arguments .
* @ return The value obtained through { @ link PreparedStatement # getUpdateCount ( ) } , after executing the statement . */
static int execute ( CallInfo callInfo , DB db , String sql , Object [ ] args ) { } } | return db . access ( callInfo , ( ) -> { db . logSetup ( callInfo , sql ) ; try ( WrappedStatement ws = db . compile ( sql ) ) { PreparedStatement stmt = ws . getStatement ( ) ; if ( args != null && args . length != 0 ) { for ( int i = 0 ; i < args . length ; i ++ ) { stmt . setObject ( i + 1 , args [ i ] ) ; } } stmt . execute ( ) ; return stmt . getUpdateCount ( ) ; } } ) ; |
public class ServiceKernel { /** * Defines a service provided by this ServiceKernel
* @ param service - the service to provide
* @ param defaultImplementation - the default implementation of this service */
protected < T extends IService , Q extends T > void defineService ( Class < ? extends T > service , Q defaultImplementation ) { } } | if ( ! _definingServices ) { throw new IllegalStateException ( "Service definition must be done only in the defineServices() method." ) ; } if ( ! service . isInterface ( ) ) { throw new IllegalArgumentException ( "Services may only be defined as interfaces, and " + service . getName ( ) + " is not an interface" ) ; } IService existingServiceImpl = _services . get ( service ) ; if ( existingServiceImpl != null ) { throw new IllegalStateException ( "Service " + service . getName ( ) + " has already been " + "defined with the " + existingServiceImpl . getClass ( ) . getName ( ) + " default implementation" ) ; } _services . put ( service , defaultImplementation ) ; |
public class NodeImpl { /** * Validates the element or attribute namespace prefix on this node .
* @ param namespaceAware whether this node is namespace aware
* @ param namespaceURI this node ' s namespace URI */
static String validatePrefix ( String prefix , boolean namespaceAware , String namespaceURI ) { } } | if ( ! namespaceAware ) { throw new DOMException ( DOMException . NAMESPACE_ERR , prefix ) ; } if ( prefix != null ) { if ( namespaceURI == null || ! DocumentImpl . isXMLIdentifier ( prefix ) || "xml" . equals ( prefix ) && ! "http://www.w3.org/XML/1998/namespace" . equals ( namespaceURI ) || "xmlns" . equals ( prefix ) && ! "http://www.w3.org/2000/xmlns/" . equals ( namespaceURI ) ) { throw new DOMException ( DOMException . NAMESPACE_ERR , prefix ) ; } } return prefix ; |
public class KnowledgeBaseManagerSparql { /** * Given a service , this method will fetch and upload the models referred to by the service .
* This is a synchronous implementation that will therefore wait until its fetched and uploaded .
* @ param svc the service to be checked for referred models .
* @ return True if all the models were properly fetched , false otherwise */
private boolean fetchModelsForService ( Service svc ) { } } | boolean result = true ; Set < URI > modelUris = obtainReferencedModelUris ( svc ) ; for ( URI modelUri : modelUris ) { // Only fetch those that are not there and have not been unsucessfully fetched in the last 24 hours
if ( ! this . graphStoreManager . containsGraph ( modelUri ) ) { boolean fetch = true ; // If it was previously unreachable we need to check how long ago we tried
if ( this . unreachableModels . containsKey ( modelUri ) ) { Date now = new Date ( ) ; Date lastAttempt = this . unreachableModels . get ( modelUri ) ; long diffHours = ( now . getTime ( ) - lastAttempt . getTime ( ) ) / ( 60 * 60 * 1000 ) % 24 ; fetch = ( diffHours >= 24 ? true : false ) ; } if ( fetch ) { boolean isStored = this . graphStoreManager . fetchAndStore ( modelUri ) ; if ( ! isStored ) { this . unreachableModels . put ( modelUri , new Date ( ) ) ; } else { this . getEventBus ( ) . post ( new OntologyCreatedEvent ( new Date ( ) , modelUri ) ) ; } result = result & isStored ; } } } return result ; |
public class StopWatchFactory { /** * Returns a StopWatchFactory that has been configured previously . May return
* null , if the factory has not been configured .
* @ param loggerName name to search for .
* @ return A factory , or null , if not found . */
public static StopWatchFactory getInstance ( String loggerName ) throws ConfigurationException { } } | StopWatchFactory swf = getFactories ( ) . get ( loggerName ) ; if ( swf == null ) throw new ConfigurationException ( "No logger by the name " + loggerName + " found." ) ; return swf ; |
public class JsAdminServiceImpl { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . admin . JsAdminService # listMessagingEngines ( ) */
public Enumeration listMessagingEngines ( ) { } } | if ( ! isInitialized ( ) ) { Vector v = new Vector ( ) ; return v . elements ( ) ; } return _jsmain . listMessagingEngines ( ) ; |
public class SimpleConfig { /** * Used by the JavaBean - based validator */
static void checkValid ( Path path , ConfigValueType referenceType , AbstractConfigValue value , List < ConfigException . ValidationProblem > accumulator ) { } } | if ( haveCompatibleTypes ( referenceType , value ) ) { if ( referenceType == ConfigValueType . LIST && value instanceof SimpleConfigObject ) { // attempt conversion of indexed object to list
AbstractConfigValue listValue = DefaultTransformer . transform ( value , ConfigValueType . LIST ) ; if ( ! ( listValue instanceof SimpleConfigList ) ) addWrongType ( accumulator , referenceType , value , path ) ; } } else { addWrongType ( accumulator , referenceType , value , path ) ; } |
public class ElemIf { /** * Call the children visitors .
* @ param visitor The visitor whose appropriate method will be called . */
protected void callChildVisitors ( XSLTVisitor visitor , boolean callAttrs ) { } } | if ( callAttrs ) m_test . getExpression ( ) . callVisitors ( m_test , visitor ) ; super . callChildVisitors ( visitor , callAttrs ) ; |
public class CalculateSortedRank { /** * The output column names
* This will often be the same as the input
* @ return the output column names */
@ Override public String [ ] outputColumnNames ( ) { } } | List < String > columnNames = inputSchema . getColumnNames ( ) ; columnNames . add ( newColumnName ) ; return columnNames . toArray ( new String [ columnNames . size ( ) ] ) ; |
public class JsonConfig { /** * Removes an annotation that marks a field to be skipped when building . < br >
* [ Java - & gt ; JSON ] */
public void addIgnoreFieldAnnotation ( Class annotationClass ) { } } | if ( annotationClass != null && ! ignoreFieldAnnotations . contains ( annotationClass . getName ( ) ) ) { ignoreFieldAnnotations . add ( annotationClass . getName ( ) ) ; } |
public class BatchExecutor { /** * < p > batch . < / p >
* @ param actions a { @ link java . util . List } object .
* @ return an array of { @ link org . apache . hadoop . hbase . client . Result } objects .
* @ throws java . io . IOException if any . */
public Result [ ] batch ( List < ? extends Row > actions ) throws IOException { } } | try { Object [ ] resultsOrErrors = new Object [ actions . size ( ) ] ; batchCallback ( actions , resultsOrErrors , null ) ; // At this point we are guaranteed that the array only contains results ,
// if it had any errors , batch would ' ve thrown an exception
Result [ ] results = new Result [ resultsOrErrors . length ] ; System . arraycopy ( resultsOrErrors , 0 , results , 0 , results . length ) ; return results ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . error ( "Encountered exception in batch(List<>)." , e ) ; throw new IOException ( "Batch error" , e ) ; } |
public class SparseShortVector { /** * Sets the dimensionality to the new value .
* @ param dimensionality the new dimensionality
* @ throws IllegalArgumentException if the given dimensionality is too small
* to cover the given values ( i . e . , the maximum index of any value not
* zero is bigger than the given dimensionality ) */
@ Override public void setDimensionality ( int dimensionality ) throws IllegalArgumentException { } } | final int maxdim = getMaxDim ( ) ; if ( maxdim > dimensionality ) { throw new IllegalArgumentException ( "Given dimensionality " + dimensionality + " is too small w.r.t. the given values (occurring maximum: " + maxdim + ")." ) ; } this . dimensionality = dimensionality ; |
public class QueueService { /** * Returns the local queue statistics for the given name and
* partition ID . If this node is the owner for the partition ,
* returned stats contain { @ link LocalQueueStats # getOwnedItemCount ( ) } ,
* otherwise it contains { @ link LocalQueueStats # getBackupItemCount ( ) } .
* @ param name the name of the queue for which the statistics are returned
* @ param partitionId the partition ID for which the statistics are returned
* @ return the statistics */
public LocalQueueStats createLocalQueueStats ( String name , int partitionId ) { } } | LocalQueueStatsImpl stats = getLocalQueueStatsImpl ( name ) ; stats . setOwnedItemCount ( 0 ) ; stats . setBackupItemCount ( 0 ) ; QueueContainer container = containerMap . get ( name ) ; if ( container == null ) { return stats ; } Address thisAddress = nodeEngine . getClusterService ( ) . getThisAddress ( ) ; IPartition partition = partitionService . getPartition ( partitionId , false ) ; Address owner = partition . getOwnerOrNull ( ) ; if ( thisAddress . equals ( owner ) ) { stats . setOwnedItemCount ( container . size ( ) ) ; } else if ( owner != null ) { stats . setBackupItemCount ( container . backupSize ( ) ) ; } container . setStats ( stats ) ; return stats ; |
public class CurvedArrow { /** * Draws the text on the high point of the arc . The text drawn is none other
* than the label for this object , as retrieved from < CODE > getLabel < / CODE > .
* @ param g
* the graphics object to draw the text upon */
public void drawText ( Graphics2D g ) { } } | // We don ' t want to corrupt the graphics environs with our
// affine transforms !
Graphics2D g2 = ( Graphics2D ) g . create ( ) ; g2 . setRenderingHint ( RenderingHints . KEY_ANTIALIASING , RenderingHints . VALUE_ANTIALIAS_ON ) ; g2 . transform ( affineToText ) ; // What about the text label ?
FontMetrics metrics = g2 . getFontMetrics ( ) ; bounds = metrics . getStringBounds ( getLabel ( ) , g2 ) ; // Will the label appear to be upside down ?
boolean upsideDown = end . x < start . x ; float dx = ( float ) bounds . getWidth ( ) / 2.0f ; float dy = ( curvy < 0.0f ) ^ upsideDown ? metrics . getAscent ( ) : - metrics . getDescent ( ) ; bounds . setRect ( bounds . getX ( ) - dx , bounds . getY ( ) + dy , bounds . getWidth ( ) , bounds . getHeight ( ) ) ; for ( int i = 0 ; i < label . length ( ) ; i += CHARS_PER_STEP ) { String sublabel = label . substring ( i , Math . min ( i + CHARS_PER_STEP , label . length ( ) ) ) ; g2 . drawString ( sublabel , - dx , dy ) ; dx -= metrics . getStringBounds ( sublabel , g2 ) . getWidth ( ) ; } // g2 . drawString ( label , - dx , dy ) ;
g2 . dispose ( ) ; /* * if ( GRAPHICS = = null ) { GRAPHICS = g . create ( ) ; METRICS =
* GRAPHICS . getFontMetrics ( ) ; } */ |
public class EntityReferenceResolverDecorator { /** * Resolve entity references */
@ Override public Iterator < Entity > iterator ( ) { } } | Stream < Entity > entities = delegate ( ) . findAll ( new QueryImpl < > ( ) ) ; return resolveEntityReferences ( entities ) . iterator ( ) ; |
public class Img { /** * 图像切割为圆形 ( 按指定起点坐标和半径切割 )
* @ param x 原图的x坐标起始位置
* @ param y 原图的y坐标起始位置
* @ param radius 半径 , 小于0表示填充满整个图片 ( 直径取长宽最小值 )
* @ return this
* @ since 4.1.15 */
public Img cut ( int x , int y , int radius ) { } } | final BufferedImage srcImage = getValidSrcImg ( ) ; final int width = srcImage . getWidth ( ) ; final int height = srcImage . getHeight ( ) ; // 计算直径
final int diameter = radius > 0 ? radius * 2 : Math . min ( width , height ) ; final BufferedImage targetImage = new BufferedImage ( diameter , diameter , BufferedImage . TYPE_INT_ARGB ) ; final Graphics2D g = targetImage . createGraphics ( ) ; g . setClip ( new Ellipse2D . Double ( 0 , 0 , diameter , diameter ) ) ; if ( this . positionBaseCentre ) { x = x - width / 2 + diameter / 2 ; y = y - height / 2 + diameter / 2 ; } g . drawImage ( srcImage , x , y , null ) ; g . dispose ( ) ; this . targetImage = targetImage ; return this ; |
public class TextRankKeyword { /** * 返回全部分词结果和对应的rank
* @ param content
* @ return */
public Map < String , Float > getTermAndRank ( String content ) { } } | assert content != null ; List < Term > termList = defaultSegment . seg ( content ) ; return getTermAndRank ( termList ) ; |
public class GraniteUiSyntheticResource { /** * Create synthetic resource child resource of the given parent resource .
* @ param parentResource Parent resource ( has to be a { @ link GraniteUiSyntheticResource } instance )
* @ param name Child resource name
* @ param resourceType Resource type
* @ param valueMap Properties
* @ return Resource */
public static Resource child ( @ NotNull Resource parentResource , @ NotNull String name , @ NotNull String resourceType , @ NotNull ValueMap valueMap ) { } } | Resource child = new GraniteUiSyntheticResource ( parentResource . getResourceResolver ( ) , parentResource . getPath ( ) + "/" + name , resourceType , valueMap , ImmutableList . < Resource > of ( ) ) ; if ( parentResource instanceof GraniteUiSyntheticResource ) { ( ( GraniteUiSyntheticResource ) parentResource ) . addChild ( child ) ; } else { throw new IllegalArgumentException ( "Resource is not a GraniteUiSyntheticResource." ) ; } return child ; |
public class PygmentsGenerator2 { /** * Create the content of the " _ _ init _ _ . py " file .
* @ param it the content .
* @ param basename the basename . */
protected void generatePythonPackage ( IStyleAppendable it , String basename ) { } } | it . appendNl ( "# -*- coding: {0} -*-" , getCodeConfig ( ) . getEncoding ( ) . toLowerCase ( ) ) ; // $ NON - NLS - 1 $
it . appendHeader ( ) ; it . newLine ( ) ; it . append ( "__all__ = [ ]" ) ; // $ NON - NLS - 1 $
it . newLine ( ) ; |
public class CLI { /** * Set a Properties object with the CLI parameters for evaluation .
* @ param model
* the model parameter
* @ param testset
* the reference set
* @ param corpusFormat
* the format of the testset
* @ param netypes
* the ne types to use in the evaluation
* @ return the properties object */
private Properties setEvalProperties ( final String language , final String model , final String testset , final String corpusFormat , final String netypes , final String clearFeatures , final String unknownAccuracy ) { } } | final Properties evalProperties = new Properties ( ) ; evalProperties . setProperty ( "language" , language ) ; evalProperties . setProperty ( "model" , model ) ; evalProperties . setProperty ( "testset" , testset ) ; evalProperties . setProperty ( "corpusFormat" , corpusFormat ) ; evalProperties . setProperty ( "types" , netypes ) ; evalProperties . setProperty ( "clearFeatures" , clearFeatures ) ; evalProperties . setProperty ( "unknownAccuracy" , unknownAccuracy ) ; return evalProperties ; |
public class MetadataExtractor { /** * Processes the information about the join tables that were collected during table model extraction .
* @ param result The already processed tables
* @ param joinTableMetadata The metadata about the join tables */
private static void processJoinTables ( final List < TableModel > result , final Map < String , Map < Table , List < String > > > joinTableMetadata ) { } } | joinTableMetadata . entrySet ( ) . forEach ( entry -> { LOG . debug ( "Processing join tables for {}. Found {} join tables to process" , entry . getKey ( ) , entry . getValue ( ) . size ( ) ) ; final TableModel model = getModelBySQLName ( result , entry . getKey ( ) ) ; if ( model == null ) { LOG . error ( "Could not find table {} in the already generated models! This should not happen!" , entry . getKey ( ) ) ; throw new NullPointerException ( "Table model not found" ) ; } entry . getValue ( ) . entrySet ( ) . forEach ( tableListEntry -> { LOG . debug ( "Processing join table {}" , tableListEntry . getKey ( ) . getFullName ( ) ) ; final TableModel joinTable = getModelBySQLName ( result , tableListEntry . getKey ( ) . getFullName ( ) ) ; if ( joinTable == null ) { LOG . error ( "Could not find join table {} in the already generated models! This should not happen!" , entry . getKey ( ) ) ; throw new NullPointerException ( "Table model not found" ) ; } JoinTableSimplifierModel jtsModel = new JoinTableSimplifierModel ( ) ; jtsModel . setName ( joinTable . getName ( ) ) ; for ( ForeignKeyModel fKModel : joinTable . getForeignKeys ( ) ) { if ( fKModel . getJavaTypeName ( ) . equals ( model . getClassName ( ) ) ) { jtsModel . getConstructorParams ( ) . add ( "this" ) ; } else { jtsModel . getConstructorParams ( ) . add ( fKModel . getName ( ) ) ; jtsModel . getMethodParams ( ) . put ( fKModel . getJavaTypeName ( ) , fKModel . getName ( ) ) ; } } model . getJoinTableSimplifierData ( ) . put ( joinTable . getClassName ( ) , jtsModel ) ; } ) ; } ) ; |
public class NodeSequence { /** * Calling this with a value of false will cause the nodeset
* to be cached .
* @ see DTMIterator # allowDetachToRelease ( boolean ) */
public void allowDetachToRelease ( boolean allowRelease ) { } } | if ( ( false == allowRelease ) && ! hasCache ( ) ) { setShouldCacheNodes ( true ) ; } if ( null != m_iter ) m_iter . allowDetachToRelease ( allowRelease ) ; super . allowDetachToRelease ( allowRelease ) ; |
public class DominatorsAnalysisFactory { /** * ( non - Javadoc )
* @ see
* edu . umd . cs . findbugs . classfile . IAnalysisEngine # analyze ( edu . umd . cs . findbugs
* . classfile . IAnalysisCache , java . lang . Object ) */
@ Override public DominatorsAnalysis analyze ( IAnalysisCache analysisCache , MethodDescriptor descriptor ) throws CheckedAnalysisException { } } | CFG cfg = getCFG ( analysisCache , descriptor ) ; DepthFirstSearch dfs = getDepthFirstSearch ( analysisCache , descriptor ) ; DominatorsAnalysis analysis = new DominatorsAnalysis ( cfg , dfs , true ) ; Dataflow < java . util . BitSet , DominatorsAnalysis > dataflow = new Dataflow < > ( cfg , analysis ) ; dataflow . execute ( ) ; return analysis ; |
public class Tile { /** * Defines if the indicator of the maxMeasuredValue should be visible .
* @ param VISIBLE */
public void setMaxMeasuredValueVisible ( final boolean VISIBLE ) { } } | if ( null == maxMeasuredValueVisible ) { _maxMeasuredValueVisible = VISIBLE ; fireTileEvent ( VISIBILITY_EVENT ) ; } else { maxMeasuredValueVisible . set ( VISIBLE ) ; } |
public class KnowledgeOperations { /** * Sets the faults .
* @ param message the message
* @ param operation the operation
* @ param contextOverrides the context overrides */
public static void setFaults ( Message message , KnowledgeOperation operation , Map < String , Object > contextOverrides ) { } } | setOutputsOrFaults ( message , operation . getFaultExpressionMappings ( ) , contextOverrides , FAULT , false ) ; |
public class GobblinServiceFlowConfigResourceHandler { /** * Adding { @ link FlowConfig } should check if current node is active ( master ) .
* If current node is active , call { @ link FlowConfigResourceLocalHandler # createFlowConfig ( FlowConfig ) } directly .
* If current node is standby , forward { @ link ServiceConfigKeys # HELIX _ FLOWSPEC _ ADD } to active . The remote active will
* then call { @ link FlowConfigResourceLocalHandler # createFlowConfig ( FlowConfig ) } .
* Please refer to { @ link org . apache . gobblin . service . modules . core . ControllerUserDefinedMessageHandlerFactory } for remote handling .
* For better I / O load balance , user can enable { @ link GobblinServiceFlowConfigResourceHandler # flowCatalogLocalCommit } .
* The { @ link FlowConfig } will be then persisted to { @ link org . apache . gobblin . runtime . spec _ catalog . FlowCatalog } first before it is
* forwarded to active node ( if current node is standby ) for execution . */
@ Override public CreateResponse createFlowConfig ( FlowConfig flowConfig ) throws FlowConfigLoggedException { } } | String flowName = flowConfig . getId ( ) . getFlowName ( ) ; String flowGroup = flowConfig . getId ( ) . getFlowGroup ( ) ; checkHelixConnection ( ServiceConfigKeys . HELIX_FLOWSPEC_ADD , flowName , flowGroup ) ; try { if ( ! jobScheduler . isActive ( ) && helixManager . isPresent ( ) ) { CreateResponse response = null ; if ( this . flowCatalogLocalCommit ) { // We will handle FS I / O locally for load balance before forwarding to remote node .
response = this . localHandler . createFlowConfig ( flowConfig , false ) ; } if ( ! flowConfig . hasExplain ( ) || ! flowConfig . isExplain ( ) ) { // Forward the message to master only if it is not an " explain " request .
forwardMessage ( ServiceConfigKeys . HELIX_FLOWSPEC_ADD , FlowConfigUtils . serializeFlowConfig ( flowConfig ) , flowName , flowGroup ) ; } // Do actual work on remote node , directly return success
return response == null ? new CreateResponse ( new ComplexResourceKey < > ( flowConfig . getId ( ) , new EmptyRecord ( ) ) , HttpStatus . S_201_CREATED ) : response ; } else { return this . localHandler . createFlowConfig ( flowConfig ) ; } } catch ( IOException e ) { throw new FlowConfigLoggedException ( HttpStatus . S_500_INTERNAL_SERVER_ERROR , "Cannot create flowConfig [flowName=" + flowName + " flowGroup=" + flowGroup + "]" , e ) ; } |
public class RegistryAuth { /** * Construct a Builder based upon the " auth " field of the docker client config file . */
public static Builder forAuth ( final String auth ) { } } | // split with limit = 2 to catch case where password contains a colon
final String [ ] authParams = Base64 . decodeAsString ( auth ) . split ( ":" , 2 ) ; if ( authParams . length != 2 ) { return builder ( ) ; } return builder ( ) . username ( authParams [ 0 ] . trim ( ) ) . password ( authParams [ 1 ] . trim ( ) ) ; |
public class LinkedTransferQueue { /** * Returns { @ code true } if this queue contains no elements .
* @ return { @ code true } if this queue contains no elements */
public boolean isEmpty ( ) { } } | for ( Node p = head ; p != null ; p = succ ( p ) ) { if ( ! p . isMatched ( ) ) return ! p . isData ; } return true ; |
public class CmsResourceTypeXmlContent { /** * Returns the edit handler if configured . < p >
* @ param cms the cms context
* @ return the edit handler */
public I_CmsEditHandler getEditHandler ( CmsObject cms ) { } } | String schema = getSchema ( ) ; try { CmsXmlContentDefinition contentDefinition = CmsXmlContentDefinition . unmarshal ( cms , schema ) ; // get the content handler for the resource type to create
I_CmsXmlContentHandler handler = contentDefinition . getContentHandler ( ) ; return handler . getEditHandler ( ) ; } catch ( CmsXmlException e ) { LOG . error ( e . getMessage ( ) , e ) ; } return null ; |
public class Files { /** * Reads the contents of a file into a byte array .
* The file is always closed .
* @ param file the file to read , must not be { @ code null }
* @ return the file contents , never { @ code null }
* @ throws IOException in case of an I / O error
* @ since 1.1 */
public static byte [ ] bytes ( File file ) throws IOException { } } | InputStream in = null ; try { in = openInputStream ( file ) ; return toByteArray ( in , file . length ( ) ) ; } finally { closeQuietly ( in ) ; } |
public class InternalPureXbaseLexer { /** * $ ANTLR start " T _ _ 70" */
public final void mT__70 ( ) throws RecognitionException { } } | try { int _type = T__70 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // InternalPureXbase . g : 68:7 : ( ' null ' )
// InternalPureXbase . g : 68:9 : ' null '
{ match ( "null" ) ; } state . type = _type ; state . channel = _channel ; } finally { } |
public class Data { /** * Like java . net . URLDecoder except no conversion of " + " to space */
public static String decode ( String urlString ) { } } | StringBuilder s = new StringBuilder ( ) ; UrlDecoderState state = UrlDecoderState . INITIAL ; int hiNibble = 0 ; int lowNibble = 0 ; int length = urlString . length ( ) ; for ( int i = 0 ; i < length ; i ++ ) { char c = urlString . charAt ( i ) ; switch ( state ) { case INITIAL : if ( c == '%' ) { state = UrlDecoderState . FIRST_HEX ; } else { s . append ( c ) ; } break ; case FIRST_HEX : hiNibble = lookup ( c ) ; state = UrlDecoderState . SECOND_HEX ; break ; case SECOND_HEX : lowNibble = lookup ( c ) ; byte b = ( byte ) ( ( hiNibble << 4 ) | lowNibble ) ; s . append ( ( char ) b ) ; state = UrlDecoderState . INITIAL ; break ; } } return s . toString ( ) ; |
public class JoinedQueryExecutor { /** * Returns the count of exactly matching properties from the two
* orderings . The match must be consecutive and start at the first
* property . */
private static < T extends Storable > int commonOrderingCount ( OrderingList < T > orderingA , OrderingList < T > orderingB ) { } } | int commonCount = Math . min ( orderingA . size ( ) , orderingB . size ( ) ) ; for ( int i = 0 ; i < commonCount ; i ++ ) { if ( ! orderingA . get ( i ) . equals ( orderingB . get ( i ) ) ) { return i ; } } return commonCount ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EEnum getIfcPermeableCoveringOperationEnum ( ) { } } | if ( ifcPermeableCoveringOperationEnumEEnum == null ) { ifcPermeableCoveringOperationEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1027 ) ; } return ifcPermeableCoveringOperationEnumEEnum ; |
public class BatchStatementBuilder { /** * Adds new statements to the batch .
* @ return this builder ; never { @ code null } .
* @ see BatchStatement # addAll ( Iterable ) */
@ NonNull public BatchStatementBuilder addStatements ( @ NonNull Iterable < BatchableStatement < ? > > statements ) { } } | int delta = Iterables . size ( statements ) ; if ( statementsCount + delta > 0xFFFF ) { throw new IllegalStateException ( "Batch statement cannot contain more than " + 0xFFFF + " statements." ) ; } statementsCount += delta ; statementsBuilder . addAll ( statements ) ; return this ; |
public class AbstractViewQuery { /** * Notify a ListView that the data of it ' s adapter is changed .
* @ return self */
public T dataChanged ( ) { } } | if ( view instanceof AdapterView ) { AdapterView < ? > av = ( AdapterView < ? > ) view ; Adapter a = av . getAdapter ( ) ; if ( a instanceof BaseAdapter ) { BaseAdapter ba = ( BaseAdapter ) a ; ba . notifyDataSetChanged ( ) ; } } return self ( ) ; |
public class WebSocketB2BUASipServlet { /** * { @ inheritDoc } */
protected void doInvite ( SipServletRequest request ) throws ServletException , IOException { } } | request . getSession ( ) . setAttribute ( "lastRequest" , request ) ; if ( logger . isInfoEnabled ( ) ) { logger . info ( "Simple Servlet: Got request:\n" + request . getMethod ( ) ) ; } SipServletRequest outRequest = sipFactory . createRequest ( request . getApplicationSession ( ) , "INVITE" , request . getFrom ( ) . getURI ( ) , request . getTo ( ) . getURI ( ) ) ; String user = ( ( SipURI ) request . getTo ( ) . getURI ( ) ) . getUser ( ) ; Address calleeAddress = registeredUsersToIp . get ( user ) ; if ( calleeAddress == null ) { request . createResponse ( SipServletResponse . SC_NOT_FOUND ) . send ( ) ; return ; } outRequest . setRequestURI ( calleeAddress . getURI ( ) ) ; if ( request . getContent ( ) != null ) { outRequest . setContent ( request . getContent ( ) , request . getContentType ( ) ) ; } outRequest . send ( ) ; sessions . put ( request . getSession ( ) , outRequest . getSession ( ) ) ; sessions . put ( outRequest . getSession ( ) , request . getSession ( ) ) ; |
public class AbstractOperationContext { /** * Check that each emitted notification is properly described by its source . */
private void checkUndefinedNotification ( Notification notification ) { } } | String type = notification . getType ( ) ; PathAddress source = notification . getSource ( ) ; Map < String , NotificationEntry > descriptions = getRootResourceRegistration ( ) . getNotificationDescriptions ( source , true ) ; if ( ! descriptions . keySet ( ) . contains ( type ) ) { missingNotificationDescriptionWarnings . add ( ControllerLogger . ROOT_LOGGER . notificationIsNotDescribed ( type , source ) ) ; } |
public class SAX2DTM2 { /** * Receive notification of the end of the document .
* @ throws SAXException Any SAX exception , possibly
* wrapping another exception .
* @ see org . xml . sax . ContentHandler # endDocument */
public void endDocument ( ) throws SAXException { } } | super . endDocument ( ) ; // Add a NULL entry to the end of the node arrays as
// the end indication .
m_exptype . addElement ( NULL ) ; m_parent . addElement ( NULL ) ; m_nextsib . addElement ( NULL ) ; m_firstch . addElement ( NULL ) ; // Set the cached references after the document is built .
m_extendedTypes = m_expandedNameTable . getExtendedTypes ( ) ; m_exptype_map = m_exptype . getMap ( ) ; m_nextsib_map = m_nextsib . getMap ( ) ; m_firstch_map = m_firstch . getMap ( ) ; m_parent_map = m_parent . getMap ( ) ; |
public class Actions { /** * Equivalent to calling :
* < i > Actions . click ( element ) . sendKeys ( keysToSend ) . < / i >
* This method is different from { @ link WebElement # sendKeys ( CharSequence . . . ) } - see
* { @ link # sendKeys ( CharSequence . . . ) } for details how .
* @ see # sendKeys ( java . lang . CharSequence [ ] )
* @ param target element to focus on .
* @ param keys The keys .
* @ return A self reference .
* @ throws IllegalArgumentException if keys is null */
public Actions sendKeys ( WebElement target , CharSequence ... keys ) { } } | if ( isBuildingActions ( ) ) { action . addAction ( new SendKeysAction ( jsonKeyboard , jsonMouse , ( Locatable ) target , keys ) ) ; } return focusInTicks ( target ) . sendKeysInTicks ( keys ) ; |
public class StringUtils { /** * < p > Joins the elements of the provided < code > Iterator < / code > into
* a single String containing the provided elements . < / p >
* < p > No delimiter is added before or after the list .
* A < code > null < / code > separator is the same as an empty String ( " " ) . < / p >
* < p > See the examples here : { @ link # join ( Object [ ] , String ) } . < / p >
* @ param iterator the < code > Iterator < / code > of values to join together , may be null
* @ param separator the separator character to use , null treated as " "
* @ return the joined String , < code > null < / code > if null iterator input */
public static String join ( Iterator iterator , String separator ) { } } | // handle null , zero and one elements before building a buffer
if ( iterator == null ) { return null ; } if ( ! iterator . hasNext ( ) ) { return EMPTY ; } Object first = iterator . next ( ) ; if ( ! iterator . hasNext ( ) ) { return toString ( first ) ; } // two or more elements
StringBuilder buf = new StringBuilder ( 256 ) ; // Java default is 16 , probably too small
if ( first != null ) { buf . append ( first ) ; } while ( iterator . hasNext ( ) ) { if ( separator != null ) { buf . append ( separator ) ; } Object obj = iterator . next ( ) ; if ( obj != null ) { buf . append ( obj ) ; } } return buf . toString ( ) ; |
public class EventDistributor { /** * Adds an listener for events that gets called when the event finished processing .
* It will register for all ids individually !
* This method will ignore if this listener is already listening to an Event .
* Method is thread - safe .
* @ param ids this can be type , or descriptors etc .
* @ param eventListener the ActivatorEventListener - interface for receiving activator events */
@ SuppressWarnings ( "SynchronizationOnLocalVariableOrMethodParameter" ) public void registerEventFinishedListener ( List < String > ids , EventListenerModel eventListener ) { } } | for ( String id : ids ) { ArrayList < EventListenerModel > listenersList = finishListeners . get ( id ) ; if ( listenersList == null ) { finishListeners . put ( id , new ArrayList < > ( ) ) ; listenersList = finishListeners . get ( id ) ; } if ( ! listenersList . contains ( eventListener ) ) { synchronized ( listenersList ) { listenersList . add ( eventListener ) ; } } } |
public class KubeConfigUtils { /** * Returns the current user token for the config and current context
* @ param config Config object
* @ param context Context object
* @ return returns current user based upon provided parameters . */
public static String getUserToken ( Config config , Context context ) { } } | AuthInfo authInfo = getUserAuthInfo ( config , context ) ; if ( authInfo != null ) { return authInfo . getToken ( ) ; } return null ; |
public class JavaSerializationHelper { /** * Serialize a Java object into a bytes array .
* @ param o the object to serialize
* @ return the bytes array of the serialized object */
public byte [ ] serializeToBytes ( final Serializable o ) { } } | byte [ ] bytes = null ; try ( final ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; final ObjectOutputStream oos = new ObjectOutputStream ( baos ) ) { oos . writeObject ( o ) ; oos . flush ( ) ; bytes = baos . toByteArray ( ) ; } catch ( final IOException e ) { logger . warn ( "cannot Java serialize object" , e ) ; } return bytes ; |
public class HadoopJobUtils { /** * Based on the HADOOP _ SECURITY _ MANAGER _ CLASS _ PARAM setting in the incoming props , finds the
* correct HadoopSecurityManager Java class
* @ return a HadoopSecurityManager object . Will throw exception if any errors occur ( including not
* finding a class )
* @ throws RuntimeException : If any errors happen along the way . */
public static HadoopSecurityManager loadHadoopSecurityManager ( Props props , Logger log ) throws RuntimeException { } } | Class < ? > hadoopSecurityManagerClass = props . getClass ( HADOOP_SECURITY_MANAGER_CLASS_PARAM , true , HadoopJobUtils . class . getClassLoader ( ) ) ; log . info ( "Loading hadoop security manager " + hadoopSecurityManagerClass . getName ( ) ) ; HadoopSecurityManager hadoopSecurityManager = null ; try { Method getInstanceMethod = hadoopSecurityManagerClass . getMethod ( "getInstance" , Props . class ) ; hadoopSecurityManager = ( HadoopSecurityManager ) getInstanceMethod . invoke ( hadoopSecurityManagerClass , props ) ; } catch ( InvocationTargetException e ) { String errMsg = "Could not instantiate Hadoop Security Manager " + hadoopSecurityManagerClass . getName ( ) + e . getCause ( ) ; log . error ( errMsg ) ; throw new RuntimeException ( errMsg , e ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } return hadoopSecurityManager ; |
public class Bzip2HuffmanStageDecoder { /** * Constructs Huffman decoding tables from lists of Canonical Huffman code lengths . */
void createHuffmanDecodingTables ( ) { } } | final int alphabetSize = this . alphabetSize ; for ( int table = 0 ; table < tableCodeLengths . length ; table ++ ) { final int [ ] tableBases = codeBases [ table ] ; final int [ ] tableLimits = codeLimits [ table ] ; final int [ ] tableSymbols = codeSymbols [ table ] ; final byte [ ] codeLengths = tableCodeLengths [ table ] ; int minimumLength = HUFFMAN_DECODE_MAX_CODE_LENGTH ; int maximumLength = 0 ; // Find the minimum and maximum code length for the table
for ( int i = 0 ; i < alphabetSize ; i ++ ) { final byte currLength = codeLengths [ i ] ; maximumLength = Math . max ( currLength , maximumLength ) ; minimumLength = Math . min ( currLength , minimumLength ) ; } minimumLengths [ table ] = minimumLength ; // Calculate the first output symbol for each code length
for ( int i = 0 ; i < alphabetSize ; i ++ ) { tableBases [ codeLengths [ i ] + 1 ] ++ ; } for ( int i = 1 , b = tableBases [ 0 ] ; i < HUFFMAN_DECODE_MAX_CODE_LENGTH + 2 ; i ++ ) { b += tableBases [ i ] ; tableBases [ i ] = b ; } // Calculate the first and last Huffman code for each code length ( codes at a given
// length are sequential in value )
for ( int i = minimumLength , code = 0 ; i <= maximumLength ; i ++ ) { int base = code ; code += tableBases [ i + 1 ] - tableBases [ i ] ; tableBases [ i ] = base - tableBases [ i ] ; tableLimits [ i ] = code - 1 ; code <<= 1 ; } // Populate the mapping from canonical code index to output symbol
for ( int bitLength = minimumLength , codeIndex = 0 ; bitLength <= maximumLength ; bitLength ++ ) { for ( int symbol = 0 ; symbol < alphabetSize ; symbol ++ ) { if ( codeLengths [ symbol ] == bitLength ) { tableSymbols [ codeIndex ++ ] = symbol ; } } } } currentTable = selectors [ 0 ] ; |
public class FilesImpl { /** * Gets the properties of the specified task file .
* @ param jobId The ID of the job that contains the task .
* @ param taskId The ID of the task whose file you want to get the properties of .
* @ param filePath The path to the task file that you want to get the properties of .
* @ param fileGetPropertiesFromTaskOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws BatchErrorException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void getPropertiesFromTask ( String jobId , String taskId , String filePath , FileGetPropertiesFromTaskOptions fileGetPropertiesFromTaskOptions ) { } } | getPropertiesFromTaskWithServiceResponseAsync ( jobId , taskId , filePath , fileGetPropertiesFromTaskOptions ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class PartialViewContextImpl { /** * Extension for
* https : / / issues . apache . org / jira / browse / MYFACES - 2841
* internal extension which detects that the submit is an iframe request
* will be changed for the official version which will come in 2.1
* @ return true if the current request is an iframe based ajax request */
public boolean isIFrameRequest ( ) { } } | if ( _iframeRequest == null ) { _iframeRequest = _facesContext . getExternalContext ( ) . getRequestParameterMap ( ) . containsKey ( PARTIAL_IFRAME ) ; } return _iframeRequest ; |
public class SupportLogFormatter { /** * Copied from hudson . Functions , but with external references removed : */
public static String printThrowable ( Throwable t ) { } } | if ( t == null ) { return "No Exception details" ; } StringBuilder s = new StringBuilder ( ) ; doPrintStackTrace ( s , t , null , "" , new HashSet < Throwable > ( ) ) ; return s . toString ( ) ; |
public class ExternalContentHandler { /** * Find the content type for a remote resource
* @ param url of remote resource
* @ return the content type reported by remote system or " application / octet - stream " if not supplied */
private MediaType findContentType ( final String url ) { } } | if ( url == null ) { return null ; } if ( url . startsWith ( "file" ) ) { return APPLICATION_OCTET_STREAM_TYPE ; } else if ( url . startsWith ( "http" ) ) { try ( CloseableHttpClient httpClient = HttpClients . createDefault ( ) ) { final HttpHead httpHead = new HttpHead ( url ) ; try ( CloseableHttpResponse response = httpClient . execute ( httpHead ) ) { if ( response . getStatusLine ( ) . getStatusCode ( ) == SC_OK ) { final Header contentType = response . getFirstHeader ( CONTENT_TYPE ) ; if ( contentType != null ) { return MediaType . valueOf ( contentType . getValue ( ) ) ; } } } } catch ( final IOException e ) { LOGGER . warn ( "Unable to retrieve external content from {} due to {}" , url , e . getMessage ( ) ) ; } catch ( final Exception e ) { throw new RepositoryRuntimeException ( e ) ; } } LOGGER . debug ( "Defaulting to octet stream for media type" ) ; return APPLICATION_OCTET_STREAM_TYPE ; |
public class AbstractID { /** * Converts a long to a byte array .
* @ param l the long variable to be converted
* @ param ba the byte array to store the result the of the conversion
* @ param offset offset indicating at what position inside the byte array the result of the conversion shall be stored */
private static void longToByteArray ( long l , byte [ ] ba , int offset ) { } } | for ( int i = 0 ; i < SIZE_OF_LONG ; ++ i ) { final int shift = i << 3 ; // i * 8
ba [ offset + SIZE_OF_LONG - 1 - i ] = ( byte ) ( ( l & ( 0xffL << shift ) ) >>> shift ) ; } |
public class JTrees { /** * Returns the user object from the given tree node . If the given node
* object is < code > null < / code > or not a DefaultMutableTreeNode ,
* then < code > null < / code > is returned .
* @ param nodeObject The node object
* @ return The user object */
public static Object getUserObjectFromTreeNode ( Object nodeObject ) { } } | if ( nodeObject == null ) { return null ; } if ( nodeObject instanceof DefaultMutableTreeNode ) { DefaultMutableTreeNode node = ( DefaultMutableTreeNode ) nodeObject ; Object userObject = node . getUserObject ( ) ; return userObject ; } return null ; |
public class ApolloStoreOperation { /** * Schedules operation to be executed in dispatcher
* @ param callback to be notified about operation result */
public void enqueue ( @ Nullable final Callback < T > callback ) { } } | checkIfExecuted ( ) ; this . callback . set ( callback ) ; dispatcher . execute ( new Runnable ( ) { @ Override public void run ( ) { T result ; try { result = perform ( ) ; } catch ( Exception e ) { notifyFailure ( new ApolloException ( "Failed to perform store operation" , e ) ) ; return ; } notifySuccess ( result ) ; } } ) ; |
public class route6 { /** * Use this API to fetch filtered set of route6 resources .
* filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */
public static route6 [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } } | route6 obj = new route6 ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; route6 [ ] response = ( route6 [ ] ) obj . getfiltered ( service , option ) ; return response ; |
public class CommandHelper { /** * Convert the value according the type of DeviceData .
* @ param value
* the value to insert on DeviceData
* @ param deviceDataArgin
* the DeviceData attribute to write
* @ param dataType
* the type of inserted data
* @ throws DevFailed */
public static void insertFromDevVarDoubleStringArray ( final DevVarDoubleStringArray value , final DeviceData deviceDataArgin , final int dataType ) throws DevFailed { } } | if ( dataType == TangoConst . Tango_DEVVAR_DOUBLESTRINGARRAY ) { deviceDataArgin . insert ( value ) ; } else { Except . throw_exception ( "TANGO_WRONG_DATA_ERROR" , "input type " + deviceDataArgin . getType ( ) + " not supported" , "CommandHelper.insertFromDevVarDoubleStringArray(DevVarDoubleStringArray value,deviceDataArgin)" ) ; } |
public class XAbstractFeatureCallImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setImplicitReceiver ( XExpression newImplicitReceiver ) { } } | if ( newImplicitReceiver != implicitReceiver ) { NotificationChain msgs = null ; if ( implicitReceiver != null ) msgs = ( ( InternalEObject ) implicitReceiver ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XABSTRACT_FEATURE_CALL__IMPLICIT_RECEIVER , null , msgs ) ; if ( newImplicitReceiver != null ) msgs = ( ( InternalEObject ) newImplicitReceiver ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - XbasePackage . XABSTRACT_FEATURE_CALL__IMPLICIT_RECEIVER , null , msgs ) ; msgs = basicSetImplicitReceiver ( newImplicitReceiver , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XbasePackage . XABSTRACT_FEATURE_CALL__IMPLICIT_RECEIVER , newImplicitReceiver , newImplicitReceiver ) ) ; |
public class CommerceOrderLocalServiceUtil { /** * Creates a new commerce order with the primary key . Does not add the commerce order to the database .
* @ param commerceOrderId the primary key for the new commerce order
* @ return the new commerce order */
public static com . liferay . commerce . model . CommerceOrder createCommerceOrder ( long commerceOrderId ) { } } | return getService ( ) . createCommerceOrder ( commerceOrderId ) ; |
public class ResourceBundlesHandlerImpl { /** * Joins and post process the variant composite bundle
* @ param composite
* the composite bundle
* @ param status
* the status
* @ param compositeBundleVariants
* the variants */
private void joinAndPostProcessBundle ( CompositeResourceBundle composite , BundleProcessingStatus status ) { } } | JoinableResourceBundleContent store ; stopProcessIfNeeded ( ) ; List < Map < String , String > > allVariants = VariantUtils . getAllVariants ( composite . getVariants ( ) ) ; // Add the default bundle variant ( the non variant one )
allVariants . add ( null ) ; // Process all variants
for ( Map < String , String > variants : allVariants ) { status . setBundleVariants ( variants ) ; store = new JoinableResourceBundleContent ( ) ; for ( JoinableResourceBundle childbundle : composite . getChildBundles ( ) ) { if ( ! childbundle . getInclusionPattern ( ) . isIncludeOnlyOnDebug ( ) ) { JoinableResourceBundleContent childContent = joinAndPostprocessBundle ( childbundle , variants , status ) ; // Do unitary postprocessing .
status . setProcessingType ( BundleProcessingStatus . FILE_PROCESSING_TYPE ) ; StringBuffer content = executeUnitaryPostProcessing ( composite , status , childContent . getContent ( ) , this . unitaryCompositePostProcessor ) ; childContent . setContent ( content ) ; store . append ( childContent ) ; } } // Post process composite bundle as needed
store = postProcessJoinedCompositeBundle ( composite , store . getContent ( ) , status ) ; String variantKey = VariantUtils . getVariantKey ( variants ) ; String name = VariantUtils . getVariantBundleName ( composite . getId ( ) , variantKey , false ) ; storeBundle ( name , store ) ; initBundleDataHashcode ( composite , store , variantKey ) ; } |
public class ConnectionTypesInner { /** * Retrieve the connectiontype identified by connectiontype name .
* @ param resourceGroupName Name of an Azure Resource group .
* @ param automationAccountName The name of the automation account .
* @ param connectionTypeName The name of connectiontype .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < ConnectionTypeInner > getAsync ( String resourceGroupName , String automationAccountName , String connectionTypeName , final ServiceCallback < ConnectionTypeInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( resourceGroupName , automationAccountName , connectionTypeName ) , serviceCallback ) ; |
public class CPAttachmentFileEntryPersistenceImpl {
    /**
     * Returns a range of all the cp attachment file entries where classNameId = &#63; and
     * classPK = &#63; and type = &#63; and status &ne; &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of <code>end - start</code> instances.
     * <code>start</code> and <code>end</code> are not primary keys, they are indexes in the result
     * set. Thus, <code>0</code> refers to the first result in the set. Setting both
     * <code>start</code> and <code>end</code> to {@link QueryUtil#ALL_POS} will return the full
     * result set. If <code>orderByComparator</code> is specified, then the query will include the
     * given ORDER BY logic. If <code>orderByComparator</code> is absent and pagination is required
     * (<code>start</code> and <code>end</code> are not {@link QueryUtil#ALL_POS}), then the query
     * will include the default ORDER BY logic from {@link CPAttachmentFileEntryModelImpl}. If both
     * <code>orderByComparator</code> and pagination are absent, for performance reasons, the query
     * will not have an ORDER BY clause and the returned result set will be sorted on by the
     * primary key in an ascending order.
     *
     * @param classNameId the class name ID
     * @param classPK the class pk
     * @param type the type
     * @param status the status
     * @param start the lower bound of the range of cp attachment file entries
     * @param end the upper bound of the range of cp attachment file entries (not inclusive)
     * @return the range of matching cp attachment file entries
     */
    @Override
    public List<CPAttachmentFileEntry> findByC_C_T_NotST(long classNameId, long classPK, int type, int status, int start, int end) {
        // Delegates to the orderByComparator-aware overload; a null comparator yields the
        // default/primary-key ordering described in the javadoc above.
        return findByC_C_T_NotST(classNameId, classPK, type, status, start, end, null);
    }
}
public class VariableEvaluator { /** * Replaces list variable expressions in raw string values */
Object processVariableLists ( Object rawValue , ExtendedAttributeDefinition attributeDef , EvaluationContext context , boolean ignoreWarnings ) throws ConfigEvaluatorException { } } | if ( attributeDef != null && ! attributeDef . resolveVariables ( ) ) return rawValue ; if ( rawValue instanceof List ) { List < Object > returnList = new ArrayList < Object > ( ) ; List < Object > values = ( List < Object > ) rawValue ; for ( Object o : values ) { Object processed = processVariableLists ( o , attributeDef , context , ignoreWarnings ) ; if ( processed instanceof List ) returnList . addAll ( ( List < Object > ) processed ) ; else returnList . add ( processed ) ; } return returnList ; } else if ( rawValue instanceof String ) { // Look for functions of the form $ { list ( variableName ) } first
Matcher matcher = XMLConfigConstants . VAR_LIST_PATTERN . matcher ( ( String ) rawValue ) ; if ( matcher . find ( ) ) { String var = matcher . group ( 1 ) ; String rep = getProperty ( var , context , ignoreWarnings , true ) ; return rep == null ? rawValue : MetaTypeHelper . parseValue ( rep ) ; } else { return rawValue ; } } else { return rawValue ; } |
public class EscapeTool { /** * Takes a set of Identifiers , filters them to none CPE , and formats them
* for display in a CSV .
* @ param ids the set of identifiers
* @ return the formatted list of none CPE identifiers */
public String csvIdentifiers ( Set < Identifier > ids ) { } } | if ( ids == null || ids . isEmpty ( ) ) { return "\"\"" ; } boolean addComma = false ; final StringBuilder sb = new StringBuilder ( ) ; for ( Identifier id : ids ) { if ( addComma ) { sb . append ( ", " ) ; } else { addComma = true ; } sb . append ( id . getValue ( ) ) ; } if ( sb . length ( ) == 0 ) { return "\"\"" ; } return StringEscapeUtils . escapeCsv ( sb . toString ( ) ) ; |
public class MavenHelper { /** * Build the map of dependencies for the current plugin .
* @ return the artifact .
* @ throws MojoExecutionException if the current plugin cannot be determined . */
public synchronized Map < String , Dependency > getPluginDependencies ( ) throws MojoExecutionException { } } | if ( this . pluginDependencies == null ) { final String groupId = getConfig ( "plugin.groupId" ) ; // $ NON - NLS - 1 $
final String artifactId = getConfig ( "plugin.artifactId" ) ; // $ NON - NLS - 1 $
final String pluginArtifactKey = ArtifactUtils . versionlessKey ( groupId , artifactId ) ; final Set < Artifact > dependencies = resolveDependencies ( pluginArtifactKey , true ) ; final Map < String , Dependency > deps = new TreeMap < > ( ) ; for ( final Artifact artifact : dependencies ) { final Dependency dep = toDependency ( artifact ) ; deps . put ( ArtifactUtils . versionlessKey ( artifact ) , dep ) ; } this . pluginDependencies = deps ; } return this . pluginDependencies ; |
public class PravegaTablesStoreHelper {
    /**
     * Method to retrieve the value for a given key from a table. This method takes a
     * deserialization function and deserializes the received byte[] using the supplied function.
     *
     * @param tableName tableName
     * @param key key
     * @param fromBytes deserialization function
     * @param <T> Type of deserialized object
     * @return CompletableFuture which when completed will have the versionedMetadata retrieved
     *         from the store; completes exceptionally with DATA_NOT_FOUND when the key is absent.
     */
    public <T> CompletableFuture<VersionedMetadata<T>> getEntry(String tableName, String key, Function<byte[], T> fromBytes) {
        log.trace("get entry called for : {} key : {}", tableName, key);
        // The segment-store read API is batch oriented, so wrap the single key in a singleton list.
        List<TableKey<byte[]>> keys = Collections.singletonList(new TableKeyImpl<>(key.getBytes(Charsets.UTF_8), null));
        CompletableFuture<VersionedMetadata<T>> result = new CompletableFuture<>();
        String message = "get entry: key: %s table: %s";
        withRetries(() -> segmentHelper.readTable(tableName, keys, authToken.get(), RequestTag.NON_EXISTENT_ID),
                () -> String.format(message, key, tableName))
                .thenApplyAsync(x -> {
                    // Exactly one entry is expected since exactly one key was requested.
                    TableEntry<byte[], byte[]> first = x.get(0);
                    if (first.getKey().getVersion().equals(KeyVersion.NOT_EXISTS)) {
                        // A missing key is reported via a NOT_EXISTS version, not an error response.
                        throw StoreException.create(StoreException.Type.DATA_NOT_FOUND, String.format(message, key, tableName));
                    } else {
                        log.trace("returning entry for : {} key : {} with version {}", tableName, key,
                                first.getKey().getVersion().getSegmentVersion());
                        T deserialized = fromBytes.apply(first.getValue());
                        return new VersionedMetadata<>(deserialized, new Version.LongVersion(first.getKey().getVersion().getSegmentVersion()));
                    }
                }, executor)
                .whenCompleteAsync((r, e) -> {
                    // Bridge the internal pipeline onto the returned future so callers see a plain
                    // CompletableFuture completed on this helper's executor.
                    if (e != null) {
                        result.completeExceptionally(e);
                    } else {
                        result.complete(r);
                    }
                }, executor);
        return result;
    }
}
public class AdHocRuleCreator {
    /**
     * Persists a new ad hoc rule in the DB and indexes it.
     *
     * <p>If the rule does not exist yet, an external/ad-hoc rule definition is inserted; otherwise
     * only the organization-specific metadata is (possibly) updated. The rule is then re-read and
     * committed to the search index.
     *
     * @return the rule that was inserted in the DB, which <b>includes the generated ID</b>.
     */
    public RuleDto persistAndIndex(DbSession dbSession, NewAdHocRule adHoc, OrganizationDto organizationDto) {
        RuleDao dao = dbClient.ruleDao();
        Optional<RuleDto> existingRuleDtoOpt = dao.selectByKey(dbSession, organizationDto, adHoc.getKey());
        RuleMetadataDto metadata;
        // Single timestamp reused for all created/updated fields in this operation.
        long now = system2.now();
        if (!existingRuleDtoOpt.isPresent()) {
            // First sighting: insert the rule definition, flagged external + ad hoc.
            RuleDefinitionDto dto = new RuleDefinitionDto()
                    .setRuleKey(adHoc.getKey())
                    .setIsExternal(true)
                    .setIsAdHoc(true)
                    .setName(adHoc.getEngineId() + ":" + adHoc.getRuleId())
                    .setScope(ALL)
                    .setStatus(READY)
                    .setCreatedAt(now)
                    .setUpdatedAt(now);
            dao.insert(dbSession, dto);
            metadata = new RuleMetadataDto()
                    .setRuleId(dto.getId())
                    .setOrganizationUuid(organizationDto.getUuid());
        } else {
            // No need to update the rule, only org specific metadata
            RuleDto ruleDto = existingRuleDtoOpt.get();
            // An existing rule under this key must itself be external + ad hoc.
            Preconditions.checkState(ruleDto.isExternal() && ruleDto.isAdHoc());
            metadata = ruleDto.getMetadata();
        }
        if (adHoc.hasDetails()) {
            // Only touch the DB when at least one metadata field actually changed.
            boolean changed = false;
            if (!Objects.equals(metadata.getAdHocName(), adHoc.getName())) {
                // Name is truncated to the column limit.
                metadata.setAdHocName(substring(adHoc.getName(), 0, MAX_LENGTH_AD_HOC_NAME));
                changed = true;
            }
            if (!Objects.equals(metadata.getAdHocDescription(), adHoc.getDescription())) {
                metadata.setAdHocDescription(substring(adHoc.getDescription(), 0, MAX_LENGTH_AD_HOC_DESC));
                changed = true;
            }
            if (!Objects.equals(metadata.getAdHocSeverity(), adHoc.getSeverity())) {
                metadata.setAdHocSeverity(adHoc.getSeverity());
                changed = true;
            }
            RuleType ruleType = requireNonNull(adHoc.getRuleType(), "Rule type should not be null");
            if (!Objects.equals(metadata.getAdHocType(), ruleType.getDbConstant())) {
                metadata.setAdHocType(ruleType);
                changed = true;
            }
            if (changed) {
                metadata.setUpdatedAt(now);
                // NOTE(review): createdAt is reset on every change, not just on first insert —
                // confirm this is intended (insertOrUpdate may require it to be populated).
                metadata.setCreatedAt(now);
                dao.insertOrUpdate(dbSession, metadata);
            }
        }
        // Re-read so the returned DTO carries the generated ID and merged metadata.
        RuleDto ruleDto = dao.selectOrFailByKey(dbSession, organizationDto, adHoc.getKey());
        ruleIndexer.commitAndIndex(dbSession, ruleDto.getId());
        return ruleDto;
    }
}
public class ParagraphVectors { /** * This method implements batched inference , based on Java Future parallelism model .
* PLEASE NOTE : In order to use this method , LabelledDocument being passed in should have Id field defined .
* @ param document
* @ return */
public Future < Pair < String , INDArray > > inferVectorBatched ( @ NonNull LabelledDocument document ) { } } | if ( countSubmitted == null ) initInference ( ) ; if ( this . vocab == null || this . vocab . numWords ( ) == 0 ) reassignExistingModel ( ) ; // we block execution until queued amount of documents gets below acceptable level , to avoid memory exhaust
while ( countSubmitted . get ( ) - countFinished . get ( ) > 1024 ) { ThreadUtils . uncheckedSleep ( 50 ) ; } InferenceCallable callable = new InferenceCallable ( vocab , tokenizerFactory , document ) ; Future < Pair < String , INDArray > > future = inferenceExecutor . submit ( callable ) ; countSubmitted . incrementAndGet ( ) ; return future ; |
public class JedisUtils {
    /**
     * Create a new {@link ShardedJedisPool} with default pool configs.
     *
     * @param hostsAndPorts format {@code host1:port1,host2:port2,...}; the default Redis port is
     *            used if not specified
     * @param password password for authenticating with the shards
     * @return a new sharded pool
     */
    public static ShardedJedisPool newShardedJedisPool(String hostsAndPorts, String password) {
        // Convenience overload: supplies the default pool configuration.
        return newShardedJedisPool(defaultJedisPoolConfig(), hostsAndPorts, password);
    }
}
public class RestClient { /** * Creates a URL filter for the < code > attribute < / code > where the < code > values < / code > are the valid values .
* @ param attributeName The attributes to use in the filter
* @ param values The valid values . Must not be < code > null < / code > or empty
* @ return The filter strings */
private String createListFilter ( FilterableAttribute attribute , Collection < String > values ) { } } | /* * This is more complicated than you ' d think . . . Some attribute values caused incompatible changes to the JSON data model so are actually stored in a different object in the
* JSON . Therefore the filter that we are constructing maybe pointing to one or two attributes in the JSON . We create a filter for both possible attributes and then only
* add the ones that we used . */
boolean firstFilter1Value = true ; boolean firstFilter2Value = true ; StringBuilder filter1 = new StringBuilder ( attribute . getAttributeName ( ) ) . append ( "=" ) ; StringBuilder filter2 = new StringBuilder ( ) ; Collection < String > filter2Values = attribute . getValuesInSecondaryAttributeName ( ) == null ? Collections . < String > emptySet ( ) : attribute . getValuesInSecondaryAttributeName ( ) ; if ( attribute . getSecondaryAttributeName ( ) != null ) { filter2 = filter2 . append ( attribute . getSecondaryAttributeName ( ) ) . append ( "=" ) ; } for ( String value : values ) { if ( filter2Values . contains ( value ) ) { try { value = URLEncoder . encode ( value , "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { // If UTF - 8 encoding isn ' t supported we ' ll just have to try the unencoded string
} if ( firstFilter2Value ) { firstFilter2Value = false ; } else { // OR all types so we get them all
filter2 . append ( ENCODED_BAR ) ; } filter2 . append ( value ) ; } else { try { value = URLEncoder . encode ( value , "UTF-8" ) ; } catch ( UnsupportedEncodingException e ) { // If UTF - 8 encoding isn ' t supported we ' ll just have to try the unencoded string
} if ( firstFilter1Value ) { firstFilter1Value = false ; } else { // OR all types so we get them all
filter1 . append ( ENCODED_BAR ) ; } filter1 . append ( value ) ; } } if ( ! firstFilter1Value && ! firstFilter2Value ) { throw new IllegalArgumentException ( "Unable to filter values that come from two different JSON objects, attempted to filter " + attribute + " using values " + values ) ; } if ( ! firstFilter1Value ) { return filter1 . toString ( ) ; } if ( ! firstFilter2Value ) { return filter2 . toString ( ) ; } return null ; |
public class ListPipelinesResult { /** * An array of < code > Pipeline < / code > objects .
* @ param pipelines
* An array of < code > Pipeline < / code > objects . */
public void setPipelines ( java . util . Collection < Pipeline > pipelines ) { } } | if ( pipelines == null ) { this . pipelines = null ; return ; } this . pipelines = new com . amazonaws . internal . SdkInternalList < Pipeline > ( pipelines ) ; |
public class DraggableView {
    /**
     * Animates the view to become hidden.
     *
     * @param diff
     *            The distance the view has to be vertically moved by, as an {@link Integer} value
     * @param animationSpeed
     *            The speed of the animation in pixels per milliseconds as a {@link Float} value
     * @param interpolator
     *            The interpolator, which should be used by the animation, as an instance of the
     *            type {@link Interpolator}. The interpolator may not be null
     * @param cancel
     *            True, if the view should be canceled, false otherwise
     */
    private void animateHideView(final int diff, final float animationSpeed, @NonNull final Interpolator interpolator, final boolean cancel) {
        // NOTE(review): the 'false' argument presumably selects the hide (vs. show) listener
        // variant — confirm against createAnimationListener.
        animateView(diff, animationSpeed, createAnimationListener(false, cancel), interpolator);
    }
}
public class GroupWsRef { /** * Creates a reference to a group by its organization and name . Virtual groups " Anyone " are
* supported .
* @ param organizationKey key of organization . If { @ code null } , then default organization will be used .
* @ param name non - null name . Can refer to anyone group ( case - insensitive { @ code " anyone " } ) . */
static GroupWsRef fromName ( @ Nullable String organizationKey , String name ) { } } | return new GroupWsRef ( NULL_ID , organizationKey , requireNonNull ( name ) ) ; |
public class TransientPortletEntityDao { /** * / * ( non - Javadoc )
* @ see org . apereo . portal . portlet . dao . IPortletEntityDao # getPortletEntitiesForUser ( int ) */
@ Override public Set < IPortletEntity > getPortletEntitiesForUser ( int userId ) { } } | final Set < IPortletEntity > portletEntities = this . delegatePortletEntityDao . getPortletEntitiesForUser ( userId ) ; return this . wrapEntities ( portletEntities ) ; |
public class Model {
    /**
     * Writes the property values of the given object to the writer.
     *
     * @param baseVisibility
     *            Controls how much data is written by adding a bias to the sub-tree cutting. A
     *            property with {@link Exported#visibility() visibility} X will be written if the
     *            current depth Y and baseVisibility Z satisfy {@code X + Z > Y}. 0 is the normal
     *            value; a positive value writes a bigger tree, a negative value a smaller one.
     * @deprecated as of 1.139 — superseded by the overload taking a pruner (see the
     *             {@code ByDepth} delegation below).
     */
    @Deprecated
    public void writeTo(T object, int baseVisibility, DataWriter writer) throws IOException {
        // ByDepth(1 - baseVisibility): translates the visibility bias into the pruner's
        // starting depth (bias 0 -> depth 1).
        writeTo(object, new ByDepth(1 - baseVisibility), writer);
    }
}
public class RetouchedBloomFilter { /** * Computes the ratio A / FP . */
private void computeRatio ( ) { } } | for ( int i = 0 ; i < vectorSize ; i ++ ) { double keyWeight = getWeight ( keyVector [ i ] ) ; double fpWeight = getWeight ( fpVector [ i ] ) ; if ( keyWeight > 0 && fpWeight > 0 ) { ratio [ i ] = keyWeight / fpWeight ; } } |
public class LocalFileResource { /** * { @ inheritDoc } */
@ Override public void put ( InputStream source ) throws IOException { } } | if ( source == null ) return ; // Error condition checking in putStream ( directory , unknown file , etc . )
OutputStream os = putStream ( ) ; byte [ ] buffer = new byte [ 1024 ] ; try { int read ; while ( ( read = source . read ( buffer ) ) >= 0 ) { os . write ( buffer , 0 , read ) ; } } finally { os . close ( ) ; } |
public class DataService { /** * Method verifies that entity is included in special list which might have additional features ( like export as pdf )
* @ param entity
* @ param < T >
* @ return */
private < T extends IEntity > boolean isSpecialEntity ( T entity ) { } } | return ( entity instanceof Estimate ) || ( entity instanceof Invoice ) || ( entity instanceof SalesReceipt ) ; |
public class AbstractExtractionCondition { /** * { @ inheritDoc }
* @ see jp . co . future . uroborosql . fluent . ExtractionCondition # isNotNull ( java . lang . String ) */
@ SuppressWarnings ( "unchecked" ) @ Override public T isNotNull ( final String col ) { } } | context ( ) . param ( CaseFormat . CAMEL_CASE . convert ( col ) , new IsNotNull ( col ) ) ; this . useOperator = true ; return ( T ) this ; |
public class ExceptionSet { /** * Checks to see if the exception set is a singleton set containing just the
* named exception
* @ param exceptionName
* ( in dotted format )
* @ return true if it is */
public boolean isSingleton ( String exceptionName ) { } } | if ( size != 1 ) { return false ; } ObjectType e = iterator ( ) . next ( ) ; return e . toString ( ) . equals ( exceptionName ) ; |
public class IniFile { /** * Sets the specified string property .
* @ param pstrSection the INI section name .
* @ param pstrProp the property to be set .
* @ pstrVal the string value to be persisted */
public void setStringProperty ( String pstrSection , String pstrProp , String pstrVal , String pstrComments ) { } } | INISection objSec = null ; objSec = ( INISection ) this . mhmapSections . get ( pstrSection ) ; if ( objSec == null ) { objSec = new INISection ( pstrSection ) ; this . mhmapSections . put ( pstrSection , objSec ) ; } objSec . setProperty ( pstrProp , pstrVal , pstrComments ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.