signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DFSClient { /** * Get a listing of the indicated directory */ public FileStatus [ ] listPaths ( String src ) throws IOException { } }
checkOpen ( ) ; metrics . incLsCalls ( ) ; try { if ( namenodeProtocolProxy == null ) { return versionBasedListPath ( src ) ; } return methodBasedListPath ( src ) ; } catch ( RemoteException re ) { throw re . unwrapRemoteException ( AccessControlException . class ) ; }
public class Participants { /** * The list of users . * @ param users * The list of users . */ public void setUsers ( java . util . Collection < UserMetadata > users ) { } }
if ( users == null ) { this . users = null ; return ; } this . users = new java . util . ArrayList < UserMetadata > ( users ) ;
public class SqlRepository { /** * Use with care . Removes all content of all tables . Should only * be used for JUnit tests . */ public void clear ( ) { } }
List < AbstractTable < ? > > tableList = new ArrayList < AbstractTable < ? > > ( tables . values ( ) ) ; for ( AbstractTable < ? > table : tableList ) { table . clear ( ) ; }
public class StructureAlignmentOptimizer { /** * run the optimization * @ param maxi maximum nr . of iterations * @ throws StructureException */ public void runOptimization ( int maxi ) throws StructureException { } }
superimposeBySet ( ) ; if ( debug ) System . err . println ( " initial rmsd " + rmsd ) ; // if ( showAlig ) // showCurrentAlignment ( equLen , equSet , " after initial superimposeBySet Len : " + equLen + " rmsd : " + rmsd ) ; maxKeepStep = 4 ; keepStep = 0 ; optimize ( maxi ) ;
public class VirtualMachinesInner { /** * Lists all available virtual machine sizes to which the specified virtual machine can be resized . * @ param resourceGroupName The name of the resource group . * @ param vmName The name of the virtual machine . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the List & lt ; VirtualMachineSizeInner & gt ; object if successful . */ public List < VirtualMachineSizeInner > listAvailableSizes ( String resourceGroupName , String vmName ) { } }
return listAvailableSizesWithServiceResponseAsync ( resourceGroupName , vmName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class JBBPCompilerUtils { /** * Find a named field info index in a list for its path . * @ param fieldPath a field path , it must not be null . * @ param namedFields a list contains named field info items . * @ return the index of a field for the path if found one , - 1 otherwise */ public static int findIndexForFieldPath ( final String fieldPath , final List < JBBPNamedFieldInfo > namedFields ) { } }
final String normalized = JBBPUtils . normalizeFieldNameOrPath ( fieldPath ) ; int result = - 1 ; for ( int i = namedFields . size ( ) - 1 ; i >= 0 ; i -- ) { final JBBPNamedFieldInfo f = namedFields . get ( i ) ; if ( normalized . equals ( f . getFieldPath ( ) ) ) { result = i ; break ; } } return result ;
public class TimeZoneFormat {
    /**
     * Returns offset from GMT (UTC) in milliseconds for the given localized GMT
     * offset format string. When the given string cannot be parsed, this method
     * sets the current position as the error index to <code>ParsePosition pos</code>
     * and returns 0.
     *
     * @param text the text containing a localized GMT offset string at the position
     * @param pos the position
     * @return the offset from GMT (UTC) in milliseconds for the given localized GMT
     *         offset format string
     * @see #formatOffsetLocalizedGMT(int)
     */
    public int parseOffsetLocalizedGMT(String text, ParsePosition pos) {
        // Delegate to the internal variant. NOTE(review): the two extra arguments
        // presumably select the long (non-short) format and no extra result holder —
        // confirm against the overload's declaration.
        return parseOffsetLocalizedGMT(text, pos, false, null);
    }
}
public class HdfsOutputSwitcher { /** * ベースのファイル名称を取得する 。 * @ param baseDir 出力先ディレクトリパス * @ param fileNameHeader ファイル名ヘッダ * @ param fileNameBody ファイル名ボディ * @ param dateFormat 日付部フォーマット * @ param targetDate 算出時刻 * @ return ベースファイル名称 */ private String generateOutputFileBase ( String baseDir , String fileNameHeader , String fileNameBody , DateFormat dateFormat , long targetDate ) { } }
StringBuilder baseFileNameBuilder = new StringBuilder ( ) ; baseFileNameBuilder . append ( baseDir ) . append ( fileNameHeader ) . append ( fileNameBody ) ; baseFileNameBuilder . append ( dateFormat . format ( new Date ( targetDate ) ) ) ; String result = baseFileNameBuilder . toString ( ) ; return result ;
public class ClassDoc { /** * Get a MethodDoc in this ClassDoc with a name and signature * matching that of the specified MethodDoc and accepted by the * specified MethodFinder */ public MethodDoc getMatchingMethod ( MethodDoc method , MethodFinder mf ) { } }
MethodDoc md = getMatchingMethod ( method ) ; if ( md != null ) { if ( mf . checkMethod ( md ) ) { return md ; } } return null ;
public class ElementUI { /** * Apply / remove the design context menu to / from the specified component . If applying the design * context menu , any existing context menu is saved . When removing the context menu , any saved * context menu is restored . * @ param component Component to which to apply / remove the design context menu . * @ param contextMenu The design menu if design mode is activated , or null if it is not . */ protected void setDesignContextMenu ( BaseUIComponent component , Menupopup contextMenu ) { } }
component . setAttribute ( CONTEXT_MENU , contextMenu ) ; if ( contextMenu == null ) { SavedState . restore ( component ) ; applyHint ( ) ; } else { new SavedState ( component ) ; component . setContext ( contextMenu ) ; component . setHint ( getDefinition ( ) . getName ( ) ) ; }
public class LinuxMIMEDatabase { /** * Creates a map with file extensions as keys pointing on sets of MIME types * associated with them . * @ return Map of file extensions pointing on MIME types * @ throws OSException */ public final Map < String , Set < String > > getExtensionMimeMap ( ) throws OSException { } }
initializeMimeExtensionArrays ( ) ; return Collections . unmodifiableMap ( extensionMime ) ;
public class DefaultPojoQuery {
    /**
     * Converts raw query parameters into their bound forms.
     *
     * <p>A single iterator over {@code params} is shared by all converters, so each
     * converter presumably consumes as many raw arguments as it needs, in order —
     * the converted list may therefore have a different length than {@code params}.
     *
     * @param params the raw parameters, consumed in order
     * @return the converted parameters as an array
     */
    @Override
    protected Object[] convertParameters(Object[] params) {
        Iterator<Object> parameterIterator = Iterators.forArray(params);
        // Each binding pulls from the shared iterator as a side effect of the map step.
        List<Object> converted = parameterConverters.stream()
                .map(parameterBinding -> parameterBinding.convertParameter(parameterIterator))
                .collect(Collectors.toList());
        return converted.toArray(new Object[converted.size()]);
    }
}
public class PdfContentByte { /** * Moves to the start of the next line , offset from the start of the current line . * As a side effect , this sets the leading parameter in the text state . < / P > * @ param x offset of the new current point * @ param y y - coordinate of the new current point */ public void moveTextWithLeading ( float x , float y ) { } }
state . xTLM += x ; state . yTLM += y ; state . leading = - y ; content . append ( x ) . append ( ' ' ) . append ( y ) . append ( " TD" ) . append_i ( separator ) ;
public class CPInstancePersistenceImpl { /** * Returns the first cp instance in the ordered set where groupId = & # 63 ; and status = & # 63 ; . * @ param groupId the group ID * @ param status the status * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching cp instance * @ throws NoSuchCPInstanceException if a matching cp instance could not be found */ @ Override public CPInstance findByG_ST_First ( long groupId , int status , OrderByComparator < CPInstance > orderByComparator ) throws NoSuchCPInstanceException { } }
CPInstance cpInstance = fetchByG_ST_First ( groupId , status , orderByComparator ) ; if ( cpInstance != null ) { return cpInstance ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", status=" ) ; msg . append ( status ) ; msg . append ( "}" ) ; throw new NoSuchCPInstanceException ( msg . toString ( ) ) ;
public class EntityFinder { /** * Returns the DNASequence or null if one can ' t be created * @ param str * @ return */ private static DNASequence getDNASequence ( String str ) { } }
try { DNASequence s = new DNASequence ( str ) ; return s ; } catch ( CompoundNotFoundException e ) { logger . error ( "Unexpected error when creating DNASequence " , e ) ; } return null ;
public class CnvTfsBoolean { /** * < p > Convert from string . < / p > * @ param pAddParam additional params , e . g . IRequestData * to fill owner itsVersion . * @ param pStrVal string representation * @ return Boolean value * @ throws Exception - an exception */ @ Override public final Boolean fromString ( final Map < String , Object > pAddParam , final String pStrVal ) throws Exception { } }
if ( pStrVal == null // HTTP checkbox return nothing if unchecked || "" . equals ( pStrVal ) || "false" . equals ( pStrVal ) || "off" . equals ( pStrVal ) ) { return Boolean . FALSE ; } return Boolean . TRUE ;
public class HttpUtil {
    /**
     * Executes an HTTP request with the cookie spec set to IGNORE_COOKIES.
     *
     * @param httpRequest HttpRequestBase
     * @param retryTimeout retry timeout
     * @param injectSocketTimeout injected socket timeout
     * @param canceling cancellation flag observed during execution
     * @return response body as a string
     * @throws SnowflakeSQLException if a Snowflake error occurs
     * @throws IOException if a general IO error occurs
     */
    static String executeRequestWithoutCookies(HttpRequestBase httpRequest, int retryTimeout,
            int injectSocketTimeout, AtomicBoolean canceling)
            throws SnowflakeSQLException, IOException {
        // NOTE(review): the three boolean flags configure executeRequestInternal; one of
        // them presumably selects the ignore-cookies behavior promised by this method's
        // name — confirm the parameter names on executeRequestInternal's declaration.
        return executeRequestInternal(httpRequest, retryTimeout, injectSocketTimeout, canceling,
                true, false, true);
    }
}
public class CmsResourceUtil { /** * Returns the style class to use for the given resource . < p > * @ return style class name * @ see org . opencms . workplace . list . CmsListExplorerColumn # getExplorerStyleDef ( ) */ public String getStyleClassName ( ) { } }
if ( isInsideProject ( ) && isEditable ( ) ) { if ( m_resource . getState ( ) . isChanged ( ) ) { return "fc" ; } else if ( m_resource . getState ( ) . isNew ( ) ) { return "fn" ; } else if ( m_resource . getState ( ) . isDeleted ( ) ) { return "fd" ; } else { return "nf" ; } } return "fp" ;
public class WriteRequest { /** * Adds a DataPoint to the request for a Series . * @ param series The Series to write to . * @ param datapoint The DataPoint to write . * @ return The updated request . * @ since 1.0.0 */ public WriteRequest add ( Series series , DataPoint datapoint ) { } }
WritableDataPoint mdp = new WritableDataPoint ( series , datapoint . getTimestamp ( ) , datapoint . getValue ( ) ) ; data . add ( mdp ) ; return this ;
public class CmsEditLoginMessageDialog {
    /**
     * Creates the dialog HTML for all defined widgets of the named dialog (page).
     *
     * <p>This overwrites the method from the super class to create a layout variation
     * for the widgets: widget row 0 goes into the "activate" block, rows 1-4 into the
     * "configuration" block.
     *
     * @param dialog the dialog (page) to get the HTML for
     * @return the dialog HTML for all defined widgets of the named dialog (page)
     */
    @Override
    protected String createDialogHtml(String dialog) {
        StringBuffer result = new StringBuffer(1024);
        // create widget table
        result.append(createWidgetTableStart());
        // show error header once if there were validation errors
        result.append(createWidgetErrorHeader());
        // first block: the activation widget (row 0 only)
        result.append(dialogBlockStart(key(Messages.GUI_EDITOR_LABEL_ACTIVATE_BLOCK_0)));
        result.append(createWidgetTableStart());
        result.append(createDialogRowsHtml(0, 0));
        result.append(createWidgetTableEnd());
        result.append(dialogBlockEnd());
        // second block: the configuration widgets (rows 1 through 4)
        result.append(dialogBlockStart(key(Messages.GUI_EDITOR_LABEL_CONFIGURATION_BLOCK_0)));
        result.append(createWidgetTableStart());
        result.append(createDialogRowsHtml(1, 4));
        result.append(createWidgetTableEnd());
        result.append(dialogBlockEnd());
        // close widget table
        result.append(createWidgetTableEnd());
        return result.toString();
    }
}
public class AerospikeRecordReader {
    /**
     * ----- NEW API -----
     * Initializes the reader from the given split; the task context is unused.
     *
     * @param split the input split, expected to be an AerospikeSplit
     * @param context the task attempt context (ignored)
     * @throws IOException if initialization fails
     */
    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
        log.info("INITIALIZE");
        // Delegate to the split-specific initializer; a non-Aerospike split will
        // fail the cast here.
        init((AerospikeSplit) split);
    }
}
public class Iobeam {
    /**
     * Registers a device with the same parameters as the provided {@link Device}. This call is
     * <b>BLOCKING</b> and should not be called on UI threads. It will make a network call and not
     * return until it finishes. If device is null, a new {@link Device} with a random ID and name
     * will be generated and its ID returned. If the client already has a device ID set, a null
     * device parameter will return the current ID.
     *
     * @param device A {@link Device} to be registered with iobeam. If ID is not set, a random one
     *               will be assigned. Its name will also be randomly generated if unassigned, or
     *               set to the ID if ID is provided but not name.
     * @return The device ID associated with this client.
     * @throws ApiException Thrown if the iobeam client is not initialized or there are problems
     *                      writing the device ID.
     * @throws IOException Thrown if network errors occur while trying to register.
     */
    public String registerDevice(Device device) throws ApiException, IOException {
        boolean alreadySet = this.deviceId != null;
        // If device ID is set and not explicitly asking for a different one, return current ID.
        if (alreadySet && (device == null || this.deviceId.equals(device.getId()))) {
            // Re-persist the current ID (setDeviceId may write it through).
            setDeviceId(this.deviceId);
            return this.deviceId;
        }
        // Make sure to unset before attempting, so as not to reuse old ID if it fails.
        this.deviceId = null;
        DeviceService.Add req = prepareDeviceRequest(device);
        // Blocking network call: execute the registration and adopt the returned ID.
        String id = req.execute().getId();
        setDeviceId(id);
        return this.deviceId;
    }
}
public class Strings { /** * Test if given name is Java member like name . If < code > name < / code > is null return false . * @ param name name to test . * @ return true if given name is Java member like name . */ public static boolean isMemberName ( String name ) { } }
if ( name == null ) { return false ; } Matcher matcher = MEMBER_NAME_PATTERN . matcher ( name ) ; return matcher . find ( ) ;
public class ActiveSyncManager { /** * Perform various checks of stopping a sync point . * @ param syncPoint sync point to stop * @ return the path resolution result if successfully passed all checks */ @ Nullable public MountTable . Resolution resolveSyncPoint ( AlluxioURI syncPoint ) throws InvalidPathException { } }
if ( ! mSyncPathList . contains ( syncPoint ) ) { LOG . debug ( "syncPoint not found {}" , syncPoint . getPath ( ) ) ; return null ; } MountTable . Resolution resolution = mMountTable . resolve ( syncPoint ) ; return resolution ;
public class ArrayContainer { /** * in order */ private void emit ( short val ) { } }
if ( cardinality == content . length ) { increaseCapacity ( true ) ; } content [ cardinality ++ ] = val ;
public class StringGroovyMethods {
    /**
     * Sets the position of the given Matcher to the given index.
     *
     * <p>Negative indices count back from the total number of matches. The matcher is
     * reset and re-advanced by repeated {@code find()} calls to reach the target match.
     *
     * @param matcher a Matcher
     * @param idx the index number; valid range is -count .. count-1
     * @throws IndexOutOfBoundsException if idx is outside the valid range
     * @since 1.0
     */
    public static void setIndex(Matcher matcher, int idx) {
        int count = getCount(matcher);
        if (idx < -count || idx >= count) {
            throw new IndexOutOfBoundsException("index is out of range " + (-count) + ".." + (count - 1) + " (index = " + idx + ")");
        }
        if (idx == 0) {
            matcher.reset();
        } else if (idx > 0) {
            // Advance to the idx-th match from the start.
            matcher.reset();
            for (int i = 0; i < idx; i++) {
                matcher.find();
            }
        } else if (idx < 0) {
            // Negative index: translate to a from-the-start offset, then advance.
            // NOTE(review): getCount is called again here — presumably it leaves the
            // matcher reset/consistent; confirm its side effects on the matcher state.
            matcher.reset();
            idx += getCount(matcher);
            for (int i = 0; i < idx; i++) {
                matcher.find();
            }
        }
    }
}
public class DatanodeDescriptor { /** * Store block invalidation work . */ void addBlocksToBeInvalidated ( List < Block > blocklist ) { } }
assert ( blocklist != null && blocklist . size ( ) > 0 ) ; synchronized ( invalidateBlocks ) { for ( Block blk : blocklist ) { invalidateBlocks . add ( blk ) ; } }
public class MultiChangeBuilder {
    /**
     * Replaces a range of characters with the given rich-text document.
     *
     * @param start absolute start offset of the range to replace
     * @param end absolute end offset of the range to replace
     * @param replacement the rich-text document to insert
     * @return this builder, for chaining
     */
    public MultiChangeBuilder<PS, SEG, S> replaceAbsolutely(int start, int end, StyledDocument<PS, SEG, S> replacement) {
        // Wrap the replacement in a read-only view before delegating.
        return absoluteReplace(start, end, ReadOnlyStyledDocument.from(replacement));
    }
}
public class Operator { /** * Sets the degree of parallelism for this operator . * The degree must be 1 or more . * @ param dop The degree of parallelism for this operator . * @ return The operator with set degree of parallelism . */ public O setParallelism ( int dop ) { } }
if ( dop < 1 ) { throw new IllegalArgumentException ( "The parallelism of an operator must be at least 1." ) ; } this . dop = dop ; @ SuppressWarnings ( "unchecked" ) O returnType = ( O ) this ; return returnType ;
public class RObjectsPanel { /** * GEN - LAST : event _ _ addActionPerformed */ private void _delActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ _ delActionPerformed int [ ] i = _oList . getSelectedRows ( ) ; String [ ] o = new String [ i . length ] ; for ( int j = 0 ; j < i . length ; j ++ ) { o [ j ] = ( String ) _oList . getValueAt ( i [ j ] , 0 ) ; } if ( R != null ) { try { R . rm ( o ) ; } catch ( Rsession . RException ex ) { ex . printStackTrace ( ) ; } } update ( ) ;
public class CouchDBClient { /** * Gets the json from response . * @ param response * the response * @ return the json from response * @ throws IOException * Signals that an I / O exception has occurred . */ private JsonArray getJsonFromResponse ( HttpResponse response ) throws IOException { } }
InputStream content = response . getEntity ( ) . getContent ( ) ; Reader reader = new InputStreamReader ( content ) ; JsonObject json = gson . fromJson ( reader , JsonObject . class ) ; JsonElement jsonElement = json . get ( "rows" ) ; return jsonElement == null ? null : jsonElement . getAsJsonArray ( ) ;
public class ICUResourceBundle {
    /**
     * Returns a set of the locale names supported by a collection of resource bundles.
     *
     * @param bundlePrefix the prefix of the resource bundles to use
     * @param loader the class loader used to locate the bundles
     * @return the set of supported locale names
     */
    public static Set<String> getAvailableLocaleNameSet(String bundlePrefix, ClassLoader loader) {
        // Delegate to the cached availability entry for this prefix/loader pair.
        return getAvailEntry(bundlePrefix, loader).getLocaleNameSet();
    }
}
public class RESTClient { /** * Verifies that the received image is identical to the original one . * @ param xopOriginal * @ param xopResponse */ private void verifyXopResponse ( XopBean xopOriginal , XopBean xopResponse ) { } }
if ( ! Arrays . equals ( xopResponse . getBytes ( ) , xopOriginal . getBytes ( ) ) ) { throw new RuntimeException ( "Received XOP attachment is corrupted" ) ; } System . out . println ( ) ; System . out . println ( "XOP attachment has been successfully received" ) ;
public class ECKey { /** * Gets the private key in the form of an integer field element . The public key * is derived by performing EC point addition this number of times ( i . e . point * multiplying ) . * @ return - * @ throws java . lang . IllegalStateException * if the private key bytes are not available . */ public BigInteger getPrivKey ( ) { } }
if ( privKey == null ) { throw new MissingPrivateKeyException ( ) ; } else if ( privKey instanceof BCECPrivateKey ) { return ( ( BCECPrivateKey ) privKey ) . getD ( ) ; } else { throw new MissingPrivateKeyException ( ) ; }
public class HikariDataSource { /** * Evict a connection from the pool . If the connection has already been closed ( returned to the pool ) * this may result in a " soft " eviction ; the connection will be evicted sometime in the future if it is * currently in use . If the connection has not been closed , the eviction is immediate . * @ param connection the connection to evict from the pool */ public void evictConnection ( Connection connection ) { } }
HikariPool p ; if ( ! isClosed ( ) && ( p = pool ) != null && connection . getClass ( ) . getName ( ) . startsWith ( "com.zaxxer.hikari" ) ) { p . evictConnection ( connection ) ; }
public class dnsnsrec { /** * Use this API to add dnsnsrec . */ public static base_response add ( nitro_service client , dnsnsrec resource ) throws Exception { } }
dnsnsrec addresource = new dnsnsrec ( ) ; addresource . domain = resource . domain ; addresource . nameserver = resource . nameserver ; addresource . ttl = resource . ttl ; return addresource . add_resource ( client ) ;
public class DropwizardApnsClientMetricsListener {
    /**
     * Records a successful attempt to send a notification and updates metrics accordingly.
     *
     * @param apnsClient the client that sent the notification; note that this is ignored by
     *                   {@code DropwizardApnsClientMetricsListener} instances, which should always
     *                   be used for exactly one client
     * @param notificationId an opaque, unique identifier for the notification that was sent
     */
    @Override
    public void handleNotificationSent(final ApnsClient apnsClient, final long notificationId) {
        this.sentNotifications.mark();
        // Start a timer keyed by notification ID; presumably stopped when the
        // notification is later accepted/rejected — confirm in the sibling handlers.
        this.notificationTimerContexts.put(notificationId, this.notificationTimer.time());
    }
}
public class PathAndQuery { /** * Validates the { @ link String } that contains an absolute path and a query , and splits them into * the path part and the query part . If the path is usable ( e . g . , can be served a successful response from * the server and doesn ' t have variable path parameters ) , { @ link PathAndQuery # storeInCache ( String ) } should * be called to cache the parsing result for faster future invocations . * @ return a { @ link PathAndQuery } with the absolute path and query , or { @ code null } if the specified * { @ link String } is not an absolute path or invalid . */ @ Nullable public static PathAndQuery parse ( @ Nullable String rawPath ) { } }
if ( CACHE != null && rawPath != null ) { final PathAndQuery parsed = CACHE . getIfPresent ( rawPath ) ; if ( parsed != null ) { return parsed ; } } return splitPathAndQuery ( rawPath ) ;
public class Ifc4PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the EClass for IfcSensor, lazily resolved from the registered Ifc4
     * package on first access. Generated EMF accessor — do not hand-edit the logic.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcSensor() {
        if (ifcSensorEClass == null) {
            // 591 is the generated classifier index of IfcSensor in the Ifc4 package.
            ifcSensorEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(591);
        }
        return ifcSensorEClass;
    }
}
public class DSClient {
    /**
     * (non-Javadoc)
     * @see com.impetus.client.cassandra.CassandraClientBase#executeUpdateDeleteQuery(java.lang.String)
     *
     * Executes an update/delete CQL query against the Cassandra session.
     * Always returns 0 — the driver gives no affected-row count (see TODO below).
     */
    public int executeUpdateDeleteQuery(String cqlQuery) {
        Session session = null;
        try {
            if (log.isInfoEnabled()) {
                log.info("Executing cql query {}.", cqlQuery);
            }
            session = factory.getConnection();
            KunderaCoreUtils.printQuery(cqlQuery, showQuery);
            session.execute(cqlQuery);
        } finally {
            // Connection release is deliberately disabled here; presumably the
            // factory manages session lifetime elsewhere — confirm before re-enabling.
            // factory.releaseConnection(session);
        }
        // TODO: can't find a way to return number of updated records.
        return 0;
    }
}
public class RobotUtil { /** * 模拟键盘点击 < br > * 包括键盘的按下和释放 * @ param keyCodes 按键码列表 , 见 { @ link java . awt . event . KeyEvent } * @ since 4.5.7 */ public static void keyClick ( int ... keyCodes ) { } }
for ( int keyCode : keyCodes ) { robot . keyPress ( keyCode ) ; robot . keyRelease ( keyCode ) ; } delay ( ) ;
public class MapUtil { /** * 对一个Map按Value进行排序 , 返回排序LinkedHashMap , 最多只返回n条 , 多用于Value是Counter的情况 . * @ param reverse 按Value的倒序 or 正序排列 */ public static < K , V extends Comparable > Map < K , V > topNByValue ( Map < K , V > map , final boolean reverse , int n ) { } }
return topNByValueInternal ( map , n , reverse ? Ordering . from ( new ComparableEntryValueComparator < K , V > ( ) ) . reverse ( ) : new ComparableEntryValueComparator < K , V > ( ) ) ;
public class SmartBinder {
    /**
     * Using the argument names and order in the target Signature, permutes the
     * arguments in this SmartBinder. Arguments may be duplicated or omitted
     * in the target Signature, but all arguments in the target must be defined
     * in this SmartBinder.
     *
     * @param target the Signature from which to derive a new argument list
     * @return a new SmartBinder with the permute applied
     */
    public SmartBinder permute(Signature target) {
        // Compute the permutation from the current signature to the target and
        // apply it to the underlying binder; SmartBinder itself stays immutable.
        return new SmartBinder(this, target, binder.permute(signature().to(target)));
    }
}
public class ConnectionDescriptorXmlHandler { /** * startElement callback . * Only some Elements need special start operations . * @ throws MetadataException indicating mapping errors */ public void startElement ( String uri , String name , String qName , Attributes atts ) { } }
boolean isDebug = logger . isDebugEnabled ( ) ; try { switch ( getLiteralId ( qName ) ) { case JDBC_CONNECTION_DESCRIPTOR : { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( JDBC_CONNECTION_DESCRIPTOR ) ) ; JdbcConnectionDescriptor newJcd = new JdbcConnectionDescriptor ( ) ; currentAttributeContainer = newJcd ; conDesList . add ( newJcd ) ; m_CurrentJCD = newJcd ; // set the jcdAlias attribute String jcdAlias = atts . getValue ( tags . getTagById ( JCD_ALIAS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( JCD_ALIAS ) + ": " + jcdAlias ) ; m_CurrentJCD . setJcdAlias ( jcdAlias ) ; // set the jcdAlias attribute String defaultConnection = atts . getValue ( tags . getTagById ( DEFAULT_CONNECTION ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( DEFAULT_CONNECTION ) + ": " + defaultConnection ) ; m_CurrentJCD . setDefaultConnection ( Boolean . valueOf ( defaultConnection ) . booleanValue ( ) ) ; if ( m_CurrentJCD . isDefaultConnection ( ) ) { if ( defaultConnectionFound ) { throw new MetadataException ( "Found two jdbc-connection-descriptor elements with default-connection=\"true\"" ) ; } else { defaultConnectionFound = true ; } } // set platform attribute String platform = atts . getValue ( tags . getTagById ( DBMS_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( DBMS_NAME ) + ": " + platform ) ; m_CurrentJCD . setDbms ( platform ) ; // set jdbc - level attribute String level = atts . getValue ( tags . getTagById ( JDBC_LEVEL ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( JDBC_LEVEL ) + ": " + level ) ; m_CurrentJCD . setJdbcLevel ( level ) ; // set driver attribute String driver = atts . getValue ( tags . getTagById ( DRIVER_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( DRIVER_NAME ) + ": " + driver ) ; m_CurrentJCD . setDriver ( driver ) ; // set protocol attribute String protocol = atts . getValue ( tags . getTagById ( URL_PROTOCOL ) ) ; if ( isDebug ) logger . 
debug ( " " + tags . getTagById ( URL_PROTOCOL ) + ": " + protocol ) ; m_CurrentJCD . setProtocol ( protocol ) ; // set subprotocol attribute String subprotocol = atts . getValue ( tags . getTagById ( URL_SUBPROTOCOL ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( URL_SUBPROTOCOL ) + ": " + subprotocol ) ; m_CurrentJCD . setSubProtocol ( subprotocol ) ; // set the dbalias attribute String dbalias = atts . getValue ( tags . getTagById ( URL_DBALIAS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( URL_DBALIAS ) + ": " + dbalias ) ; m_CurrentJCD . setDbAlias ( dbalias ) ; // set the datasource attribute String datasource = atts . getValue ( tags . getTagById ( DATASOURCE_NAME ) ) ; // check for empty String if ( datasource != null && datasource . trim ( ) . equals ( "" ) ) datasource = null ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( DATASOURCE_NAME ) + ": " + datasource ) ; m_CurrentJCD . setDatasourceName ( datasource ) ; // set the user attribute String user = atts . getValue ( tags . getTagById ( USER_NAME ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( USER_NAME ) + ": " + user ) ; m_CurrentJCD . setUserName ( user ) ; // set the password attribute String password = atts . getValue ( tags . getTagById ( USER_PASSWD ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( USER_PASSWD ) + ": " + password ) ; m_CurrentJCD . setPassWord ( password ) ; // set eager - release attribute String eagerRelease = atts . getValue ( tags . getTagById ( EAGER_RELEASE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( EAGER_RELEASE ) + ": " + eagerRelease ) ; m_CurrentJCD . setEagerRelease ( Boolean . valueOf ( eagerRelease ) . booleanValue ( ) ) ; // set batch - mode attribute String batchMode = atts . getValue ( tags . getTagById ( BATCH_MODE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( BATCH_MODE ) + ": " + batchMode ) ; m_CurrentJCD . setBatchMode ( Boolean . 
valueOf ( batchMode ) . booleanValue ( ) ) ; // set useAutoCommit attribute String useAutoCommit = atts . getValue ( tags . getTagById ( USE_AUTOCOMMIT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( USE_AUTOCOMMIT ) + ": " + useAutoCommit ) ; m_CurrentJCD . setUseAutoCommit ( Integer . valueOf ( useAutoCommit ) . intValue ( ) ) ; // set ignoreAutoCommitExceptions attribute String ignoreAutoCommitExceptions = atts . getValue ( tags . getTagById ( IGNORE_AUTOCOMMIT_EXCEPTION ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( IGNORE_AUTOCOMMIT_EXCEPTION ) + ": " + ignoreAutoCommitExceptions ) ; m_CurrentJCD . setIgnoreAutoCommitExceptions ( Boolean . valueOf ( ignoreAutoCommitExceptions ) . booleanValue ( ) ) ; break ; } case CONNECTION_POOL : { if ( m_CurrentJCD != null ) { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( CONNECTION_POOL ) ) ; final ConnectionPoolDescriptor m_CurrentCPD = m_CurrentJCD . getConnectionPoolDescriptor ( ) ; this . currentAttributeContainer = m_CurrentCPD ; String maxActive = atts . getValue ( tags . getTagById ( CON_MAX_ACTIVE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_MAX_ACTIVE ) + ": " + maxActive ) ; if ( checkString ( maxActive ) ) m_CurrentCPD . setMaxActive ( Integer . parseInt ( maxActive ) ) ; String maxIdle = atts . getValue ( tags . getTagById ( CON_MAX_IDLE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_MAX_IDLE ) + ": " + maxIdle ) ; if ( checkString ( maxIdle ) ) m_CurrentCPD . setMaxIdle ( Integer . parseInt ( maxIdle ) ) ; String maxWait = atts . getValue ( tags . getTagById ( CON_MAX_WAIT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_MAX_WAIT ) + ": " + maxWait ) ; if ( checkString ( maxWait ) ) m_CurrentCPD . setMaxWait ( Integer . parseInt ( maxWait ) ) ; String minEvictableIdleTimeMillis = atts . getValue ( tags . getTagById ( CON_MIN_EVICTABLE_IDLE_TIME_MILLIS ) ) ; if ( isDebug ) logger . debug ( " " + tags . 
getTagById ( CON_MIN_EVICTABLE_IDLE_TIME_MILLIS ) + ": " + minEvictableIdleTimeMillis ) ; if ( checkString ( minEvictableIdleTimeMillis ) ) m_CurrentCPD . setMinEvictableIdleTimeMillis ( Long . parseLong ( minEvictableIdleTimeMillis ) ) ; String numTestsPerEvictionRun = atts . getValue ( tags . getTagById ( CON_NUM_TESTS_PER_EVICTION_RUN ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_NUM_TESTS_PER_EVICTION_RUN ) + ": " + numTestsPerEvictionRun ) ; if ( checkString ( numTestsPerEvictionRun ) ) m_CurrentCPD . setNumTestsPerEvictionRun ( Integer . parseInt ( numTestsPerEvictionRun ) ) ; String testOnBorrow = atts . getValue ( tags . getTagById ( CON_TEST_ON_BORROW ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_TEST_ON_BORROW ) + ": " + testOnBorrow ) ; if ( checkString ( testOnBorrow ) ) m_CurrentCPD . setTestOnBorrow ( Boolean . valueOf ( testOnBorrow ) . booleanValue ( ) ) ; String testOnReturn = atts . getValue ( tags . getTagById ( CON_TEST_ON_RETURN ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_TEST_ON_RETURN ) + ": " + testOnReturn ) ; if ( checkString ( testOnReturn ) ) m_CurrentCPD . setTestOnReturn ( Boolean . valueOf ( testOnReturn ) . booleanValue ( ) ) ; String testWhileIdle = atts . getValue ( tags . getTagById ( CON_TEST_WHILE_IDLE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_TEST_WHILE_IDLE ) + ": " + testWhileIdle ) ; if ( checkString ( testWhileIdle ) ) m_CurrentCPD . setTestWhileIdle ( Boolean . valueOf ( testWhileIdle ) . booleanValue ( ) ) ; String timeBetweenEvictionRunsMillis = atts . getValue ( tags . getTagById ( CON_TIME_BETWEEN_EVICTION_RUNS_MILLIS ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_TIME_BETWEEN_EVICTION_RUNS_MILLIS ) + ": " + timeBetweenEvictionRunsMillis ) ; if ( checkString ( timeBetweenEvictionRunsMillis ) ) m_CurrentCPD . setTimeBetweenEvictionRunsMillis ( Long . 
parseLong ( timeBetweenEvictionRunsMillis ) ) ; String whenExhaustedAction = atts . getValue ( tags . getTagById ( CON_WHEN_EXHAUSTED_ACTION ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_WHEN_EXHAUSTED_ACTION ) + ": " + whenExhaustedAction ) ; if ( checkString ( whenExhaustedAction ) ) m_CurrentCPD . setWhenExhaustedAction ( Byte . parseByte ( whenExhaustedAction ) ) ; String connectionFactoryStr = atts . getValue ( tags . getTagById ( CONNECTION_FACTORY ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CONNECTION_FACTORY ) + ": " + connectionFactoryStr ) ; if ( checkString ( connectionFactoryStr ) ) m_CurrentCPD . setConnectionFactory ( ClassHelper . getClass ( connectionFactoryStr ) ) ; String validationQuery = atts . getValue ( tags . getTagById ( VALIDATION_QUERY ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( VALIDATION_QUERY ) + ": " + validationQuery ) ; if ( checkString ( validationQuery ) ) m_CurrentCPD . setValidationQuery ( validationQuery ) ; // abandoned connection properties String logAbandoned = atts . getValue ( tags . getTagById ( CON_LOG_ABANDONED ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_LOG_ABANDONED ) + ": " + logAbandoned ) ; if ( checkString ( logAbandoned ) ) m_CurrentCPD . setLogAbandoned ( Boolean . valueOf ( logAbandoned ) . booleanValue ( ) ) ; String removeAbandoned = atts . getValue ( tags . getTagById ( CON_REMOVE_ABANDONED ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_REMOVE_ABANDONED ) + ": " + removeAbandoned ) ; if ( checkString ( removeAbandoned ) ) m_CurrentCPD . setRemoveAbandoned ( Boolean . valueOf ( removeAbandoned ) . booleanValue ( ) ) ; String removeAbandonedTimeout = atts . getValue ( tags . getTagById ( CON_REMOVE_ABANDONED_TIMEOUT ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CON_REMOVE_ABANDONED_TIMEOUT ) + ": " + removeAbandonedTimeout ) ; if ( checkString ( removeAbandonedTimeout ) ) m_CurrentCPD . 
setRemoveAbandonedTimeout ( Integer . parseInt ( removeAbandonedTimeout ) ) ; } break ; } case OBJECT_CACHE : { String className = atts . getValue ( tags . getTagById ( CLASS_NAME ) ) ; if ( checkString ( className ) && m_CurrentJCD != null ) { ObjectCacheDescriptor ocd = m_CurrentJCD . getObjectCacheDescriptor ( ) ; this . currentAttributeContainer = ocd ; ocd . setObjectCache ( ClassHelper . getClass ( className ) ) ; if ( isDebug ) logger . debug ( " > " + tags . getTagById ( OBJECT_CACHE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( CLASS_NAME ) + ": " + className ) ; } break ; } case SEQUENCE_MANAGER : { String className = atts . getValue ( tags . getTagById ( SEQUENCE_MANAGER_CLASS ) ) ; if ( checkString ( className ) ) { this . currentSequenceDescriptor = new SequenceDescriptor ( this . m_CurrentJCD ) ; this . currentAttributeContainer = currentSequenceDescriptor ; this . m_CurrentJCD . setSequenceDescriptor ( this . currentSequenceDescriptor ) ; if ( isDebug ) logger . debug ( " > " + tags . getTagById ( SEQUENCE_MANAGER ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( SEQUENCE_MANAGER_CLASS ) + ": " + className ) ; if ( checkString ( className ) ) currentSequenceDescriptor . setSequenceManagerClass ( ClassHelper . getClass ( className ) ) ; } break ; } case ATTRIBUTE : { // handle custom attributes String attributeName = atts . getValue ( tags . getTagById ( ATTRIBUTE_NAME ) ) ; String attributeValue = atts . getValue ( tags . getTagById ( ATTRIBUTE_VALUE ) ) ; // If we have a container to store this attribute in , then do so . if ( this . currentAttributeContainer != null ) { if ( checkString ( attributeName ) ) { if ( isDebug ) logger . debug ( " > " + tags . getTagById ( ATTRIBUTE ) ) ; if ( isDebug ) logger . debug ( " " + tags . getTagById ( ATTRIBUTE_NAME ) + ": " + attributeName + " " + tags . getTagById ( ATTRIBUTE_VALUE ) + ": " + attributeValue ) ; this . currentAttributeContainer . 
addAttribute ( attributeName , attributeValue ) ; // logger . info ( " attribute [ " + attributeName + " = " + attributeValue + " ] add to " + currentAttributeContainer . getClass ( ) ) ; } else { logger . info ( "Found 'null' or 'empty' attribute object for element " + currentAttributeContainer . getClass ( ) + " attribute-name=" + attributeName + ", attribute-value=" + attributeValue + " See jdbc-connection-descriptor with jcdAlias '" + m_CurrentJCD . getJcdAlias ( ) + "'" ) ; } } // else // logger . info ( " Found attribute ( name = " + attributeName + " , value = " + attributeValue + // " ) but I could not assign them to a descriptor " ) ; break ; } default : { // noop } } } catch ( Exception ex ) { logger . error ( ex ) ; throw new PersistenceBrokerException ( ex ) ; }
public class CouchDbClient {

    /**
     * Execute a HTTP request and handle common error cases.
     *
     * <p>Success (2xx) returns the executed connection; every other status code is
     * mapped to a typed {@code CouchDbException} subclass, optionally populated from
     * the JSON error body returned by the server.</p>
     *
     * @param connection the HttpConnection request to execute
     * @return the executed HttpConnection
     * @throws CouchDbException for HTTP error codes or if an IOException was thrown
     */
    public HttpConnection execute(HttpConnection connection) {
        // set our HttpUrlFactory on the connection
        connection.connectionFactory = factory;
        // all CouchClient requests want to receive application/json responses
        connection.requestProperties.put("Accept", "application/json");
        connection.responseInterceptors.addAll(this.responseInterceptors);
        connection.requestInterceptors.addAll(this.requestInterceptors);
        InputStream es = null; // error stream - response from server for a 500 etc

        // First try to execute our request and get the input stream with the server's
        // response. We want to catch IOException because HttpUrlConnection throws these
        // for non-success responses (eg 404 throws a FileNotFoundException) but we need
        // to map to our own specific exceptions.
        try {
            try {
                connection = connection.execute();
            } catch (HttpConnectionInterceptorException e) {
                // An interceptor aborted the request; surface it as a CouchDbException
                // carrying the HTTP status of the underlying connection.
                CouchDbException exception = new CouchDbException(connection.getConnection().getResponseMessage(),
                        connection.getConnection().getResponseCode());
                if (e.deserialize) {
                    // interceptor captured a raw JSON error body: extract error/reason fields
                    try {
                        JsonObject errorResponse = new Gson().fromJson(e.error, JsonObject.class);
                        exception.error = getAsString(errorResponse, "error");
                        exception.reason = getAsString(errorResponse, "reason");
                    } catch (JsonParseException jpe) {
                        // body was not JSON after all; keep it verbatim
                        exception.error = e.error;
                    }
                } else {
                    exception.error = e.error;
                    exception.reason = e.reason;
                }
                throw exception;
            }
            int code = connection.getConnection().getResponseCode();
            String response = connection.getConnection().getResponseMessage();
            // everything ok? return the stream
            if (code / 100 == 2) { // success [200,299]
                return connection;
            } else {
                // map the status code to the most specific exception type
                final CouchDbException ex;
                switch (code) {
                    case HttpURLConnection.HTTP_NOT_FOUND: // 404
                        ex = new NoDocumentException(response);
                        break;
                    case HttpURLConnection.HTTP_CONFLICT: // 409
                        ex = new DocumentConflictException(response);
                        break;
                    case HttpURLConnection.HTTP_PRECON_FAILED: // 412
                        ex = new PreconditionFailedException(response);
                        break;
                    case 429:
                        // If a Replay429Interceptor is present it will check for 429 and retry at
                        // intervals. If the retries do not succeed or no 429 replay was configured
                        // we end up here and throw a TooManyRequestsException.
                        ex = new TooManyRequestsException(response);
                        break;
                    default:
                        ex = new CouchDbException(response, code);
                        break;
                }
                es = connection.getConnection().getErrorStream();
                // if there is an error stream try to deserialize into the typed exception
                if (es != null) {
                    try {
                        // read the error stream into memory
                        byte[] errorResponse = IOUtils.toByteArray(es);
                        Class<? extends CouchDbException> exceptionClass = ex.getClass();
                        // treat the error as JSON and try to deserialize
                        try {
                            // Register an InstanceCreator that returns the existing exception so
                            // we can just populate the fields, but not ignore the constructor.
                            // Uses a new Gson so we don't accidentally recycle an exception.
                            Gson g = new GsonBuilder()
                                    .registerTypeAdapter(exceptionClass, new CouchDbExceptionInstanceCreator(ex))
                                    .create();
                            // Now populate the exception with the error/reason other info from JSON
                            g.fromJson(new InputStreamReader(new ByteArrayInputStream(errorResponse), "UTF-8"),
                                    exceptionClass);
                        } catch (JsonParseException e) {
                            // The error stream was not JSON so just set the string content as the
                            // error field on ex before we throw it
                            ex.error = new String(errorResponse, "UTF-8");
                        }
                    } finally {
                        close(es);
                    }
                }
                ex.setUrl(connection.url.toString());
                throw ex;
            }
        } catch (IOException ioe) {
            // transport-level failure (or HttpUrlConnection's non-2xx IOException path)
            CouchDbException ex = new CouchDbException("Error retrieving server response", ioe);
            ex.setUrl(connection.url.toString());
            throw ex;
        }
    }
}
public class Sequence { /** * Adds an outcome and probability to this sequence . * @ param outcome the outcome to be added . * @ param p the probability associated with this outcome . */ public void add ( String outcome , double p ) { } }
outcomes . add ( outcome ) ; probs . add ( new Double ( p ) ) ; score += Math . log ( p ) ;
public class GetFunctionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetFunctionRequest getFunctionRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getFunctionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getFunctionRequest . getFunctionName ( ) , FUNCTIONNAME_BINDING ) ; protocolMarshaller . marshall ( getFunctionRequest . getQualifier ( ) , QUALIFIER_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CmsWorkplaceMessages { /** * Gathers all localization files for the workplace from the different modules . < p > * For a module named " my . module . name " the locale file must be named * " my . module . name . workplace " or " my . module . name . messages " and * be located in the classpath so that the resource loader can find it . < p > * @ param locale the selected locale * @ return an initialized set of module messages */ private static List < CmsMessages > collectModuleMessages ( Locale locale ) { } }
// create a new list and add the base bundle ArrayList < CmsMessages > result = new ArrayList < CmsMessages > ( ) ; // / / / / / iterate over all registered modules / / / / / Set < String > names = new HashSet < String > ( ) ; Set < String > modules = OpenCms . getModuleManager ( ) . getModuleNames ( ) ; if ( modules != null ) { names . addAll ( modules ) ; } // use service loader to get additional bundle names Iterator < I_CmsWorkplaceMessageBundleProvider > providers = ServiceLoader . load ( I_CmsWorkplaceMessageBundleProvider . class ) . iterator ( ) ; while ( providers . hasNext ( ) ) { try { I_CmsWorkplaceMessageBundleProvider provider = providers . next ( ) ; names . addAll ( provider . getMessageBundleNames ( ) ) ; } catch ( Throwable t ) { LOG . error ( "Error loading workplace messages bundle names from classpath." , t ) ; } } // iterate all module names for ( String baseName : names ) { // / / / / / collect the workplace . properties / / / / / // this should result in a name like " my . module . name . workplace " String bundleName = baseName + PREFIX_BUNDLE_WORKPLACE ; // try to load a bundle with the module names CmsMessages msg = new CmsMessages ( bundleName , locale ) ; // bundle was loaded , add to list of bundles if ( msg . isInitialized ( ) ) { result . add ( msg ) ; } // / / / / / collect the messages . properties / / / / / // this should result in a name like " my . module . name . messages " bundleName = baseName + PREFIX_BUNDLE_MESSAGES ; // try to load a bundle with the module names msg = new CmsMessages ( bundleName , locale ) ; // bundle was loaded , add to list of bundles if ( msg . isInitialized ( ) ) { result . add ( msg ) ; } } // / / / / / collect additional core packages / / / / / I_CmsMessageBundle [ ] coreMsgs = A_CmsMessageBundle . getOpenCmsMessageBundles ( ) ; for ( int i = 0 ; i < coreMsgs . length ; i ++ ) { I_CmsMessageBundle bundle = coreMsgs [ i ] ; result . add ( bundle . 
getBundle ( locale ) ) ; } // / / / / / collect bundles configured in module configurations / / / / / if ( OpenCms . getADEManager ( ) . isInitialized ( ) ) { Set < String > bundleNames = OpenCms . getADEManager ( ) . getConfiguredWorkplaceBundles ( ) ; for ( String bundleName : bundleNames ) { CmsMessages msg = new CmsMessages ( bundleName , locale ) ; if ( msg . isInitialized ( ) ) { result . add ( msg ) ; } } } return result ;
public class TempCharStream { /** * Clean up the temp stream . */ public void destroy ( ) { } }
try { close ( ) ; } catch ( IOException e ) { } TempCharBuffer ptr = _head ; _head = null ; _tail = null ; TempCharBuffer . freeAll ( ptr ) ;
public class DataUnitBuilder {

    /**
     * Returns the content of <code>data</code> as unsigned bytes in hexadecimal string
     * representation.
     * This method does not add hexadecimal prefixes (like 0x).
     *
     * @param data data array to format
     * @param sep separator to insert between 2 formatted data bytes, <code>null</code>
     *        or "" for no gap between byte tokens
     * @return an unsigned hexadecimal string of data
     */
    public static String toHex(final byte[] data, final String sep) {
        // presize: 2 hex digits per byte plus a separator between consecutive bytes
        final int sepLen = (sep == null) ? 0 : sep.length();
        final StringBuilder sb = new StringBuilder(data.length * 2 + java.lang.Math.max(0, data.length - 1) * sepLen);
        for (int i = 0; i < data.length; ++i) {
            final int no = data[i] & 0xff;
            if (no < 0x10) {
                sb.append('0'); // zero-pad single-digit values so each byte is 2 chars
            }
            sb.append(Integer.toHexString(no));
            if (sep != null && i < data.length - 1) {
                sb.append(sep);
            }
        }
        return sb.toString();
    }
}
public class OrderItemUrl { /** * Get Resource Url for UpdateOrderItemDiscount * @ param discountId discountId parameter description DOCUMENT _ HERE * @ param orderId Unique identifier of the order . * @ param orderItemId Unique identifier of the item to remove from the order . * @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss . * @ param updateMode Specifies whether to update the original order , update the order in draft mode , or update the order in draft mode and then commit the changes to the original . Draft mode enables users to make incremental order changes before committing the changes to the original order . Valid values are " ApplyToOriginal , " " ApplyToDraft , " or " ApplyAndCommit . " * @ param version Determines whether or not to check versioning of items for concurrency purposes . * @ return String Resource Url */ public static MozuUrl updateOrderItemDiscountUrl ( Integer discountId , String orderId , String orderItemId , String responseFields , String updateMode , String version ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/orders/{orderId}/items/{orderItemId}/discounts/{discountId}?updatemode={updateMode}&version={version}&responseFields={responseFields}" ) ; formatter . formatUrl ( "discountId" , discountId ) ; formatter . formatUrl ( "orderId" , orderId ) ; formatter . formatUrl ( "orderItemId" , orderItemId ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; formatter . formatUrl ( "updateMode" , updateMode ) ; formatter . formatUrl ( "version" , version ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class WebUtilities {

    /**
     * Returns the context for this component. The component may not be in the current context.
     *
     * @param component the component to find the context it belongs to
     * @return the component's context
     */
    public static UIContext getContextForComponent(final WComponent component) {
        // Start at the current context and walk up through nested SubUIContexts
        // until we find one that contains the component (or leave sub-context land).
        UIContext context = UIContextHolder.getCurrent();

        while (context instanceof SubUIContext) {
            SubUIContext subContext = (SubUIContext) context;
            if (subContext.isInContext(component)) {
                break;
            }
            context = subContext.getBacking();
        }

        return context;
    }
}
public class NatCubic {

    /**
     * Calculates the natural cubic spline that interpolates
     * y[0], y[1], ... y[n].
     * The first segment is returned as
     * C[0].a + C[0].b*u + C[0].c*u^2 + C[0].d*u^3, 0 <= u < 1;
     * the other segments are in C[1], C[2], ... C[n-1].
     *
     * @param n number of spline segments (there are n+1 knots)
     * @param x the knot values y[0..n] to interpolate
     * @return the n cubic segments of the natural spline
     */
    public Cubic[] calcNaturalCubic(int n, double[] x) {
        double[] gamma = new double[n + 1];
        double[] delta = new double[n + 1];
        double[] D = new double[n + 1];
        int i;
        /*
          We solve the tridiagonal equation

           [2 1       ] [D[0]]   [3(x[1]   - x[0])  ]
           |1 4 1     | |D[1]|   |3(x[2]   - x[0])  |
           |  1 4 1   | | .  | = |      .           |
           |    1 4 1 | | .  |   |3(x[n]   - x[n-2])|
           [      1 2 ] [D[n]]   [3(x[n]   - x[n-1])]

          by using row operations to convert the matrix to upper triangular
          and then back substitution. The D[i] are the derivatives at the knots.
        */
        // forward elimination: gamma holds the scaled pivots, delta the scaled RHS
        gamma[0] = 1.0f / 2.0f;
        for (i = 1; i < n; i++) {
            gamma[i] = 1 / (4 - gamma[i - 1]);
        }
        gamma[n] = 1 / (2 - gamma[n - 1]);

        delta[0] = 3 * (x[1] - x[0]) * gamma[0];
        for (i = 1; i < n; i++) {
            delta[i] = (3 * (x[i + 1] - x[i - 1]) - delta[i - 1]) * gamma[i];
        }
        delta[n] = (3 * (x[n] - x[n - 1]) - delta[n - 1]) * gamma[n];

        // back substitution yields the knot derivatives
        D[n] = delta[n];
        for (i = n - 1; i >= 0; i--) {
            D[i] = delta[i] - gamma[i] * D[i + 1];
        }

        /* now compute the coefficients of the cubics */
        // segment i: a = x[i], b = D[i]; c and d follow from matching x[i+1] and D[i+1] at u=1
        Cubic[] C = new Cubic[n];
        for (i = 0; i < n; i++) {
            C[i] = new Cubic((double) x[i], D[i], 3 * (x[i + 1] - x[i]) - 2 * D[i] - D[i + 1], 2 * (x[i] - x[i + 1]) + D[i] + D[i + 1]);
        }
        return C;
    }
}
public class AstBuilder {

    /**
     * Builds a {@code ClassNode} for a parsed type context.
     *
     * <p>Handles class/interface types and primitive types, attaches any type
     * annotations, and wraps the node into an array type when array dimensions
     * are present. A missing context yields {@code Object}.</p>
     *
     * @param ctx the parsed type context (may be absent)
     * @return the resolved class node, configured with source positions
     */
    @Override
    public ClassNode visitType(TypeContext ctx) {
        if (!asBoolean(ctx)) {
            // no explicit type in the source: default to java.lang.Object
            return ClassHelper.OBJECT_TYPE;
        }

        ClassNode classNode = null;

        if (asBoolean(ctx.classOrInterfaceType())) {
            // propagate the instanceof marker down so the nested visit knows its context
            ctx.classOrInterfaceType().putNodeMetaData(IS_INSIDE_INSTANCEOF_EXPR, ctx.getNodeMetaData(IS_INSIDE_INSTANCEOF_EXPR));
            classNode = this.visitClassOrInterfaceType(ctx.classOrInterfaceType());
        } else if (asBoolean(ctx.primitiveType())) {
            classNode = this.visitPrimitiveType(ctx.primitiveType());
        }

        if (!asBoolean(classNode)) {
            if (VOID_STR.equals(ctx.getText())) { // TODO refine error message for `void`
                throw createParsingFailedException("void is not allowed here", ctx);
            }
            throw createParsingFailedException("Unsupported type: " + ctx.getText(), ctx);
        }

        classNode.addAnnotations(this.visitAnnotationsOpt(ctx.annotationsOpt()));

        List<List<AnnotationNode>> dimList = this.visitDimsOpt(ctx.dimsOpt());
        if (asBoolean(dimList)) {
            // clear array's generics type info. Groovy's bug? array's generics type
            // will be ignored, e.g. List<String>[]...p
            classNode.setGenericsTypes(null);
            classNode.setUsingGenerics(false);
            classNode = this.createArrayType(classNode, dimList);
        }

        return configureAST(classNode, ctx);
    }
}
public class PortletContextLoaderListener {

    /**
     * {@inheritDoc}
     *
     * <p>Closes the root portlet application context, removes its loader
     * attribute from the servlet context, and releases the loader.</p>
     */
    @Override
    public void contextDestroyed(ServletContextEvent sce) {
        // tear down the root portlet app context first, then clean up references
        final ServletContext ctx = sce.getServletContext();
        contextLoader.closeWebApplicationContext(ctx);
        ctx.removeAttribute(PortletApplicationContextUtils2.ROOT_PORTLET_APPLICATION_CONTEXT_LOADER_ATTRIBUTE);
        contextLoader = null;
    }
}
public class RollupReducer {

    /**
     * Compares sequentially the fields from two tuples and returns which field
     * they differ from. Use custom comparators when provided. The provided
     * RawComparators must implement "compare" so we should use them.
     * <p>
     * Important. The contract of this method is that the tuples will differ
     * always between minField and maxField. If they are equal then an Exception
     * is thrown.
     * </p>
     *
     * @param tuple1 first tuple
     * @param tuple2 second tuple (may come from a different schema)
     * @param minFieldIndex first group-field index to compare (inclusive)
     * @param maxFieldIndex last group-field index to compare (inclusive)
     * @return the first group-field index at which the tuples differ
     */
    private int indexMismatch(ITuple tuple1, ITuple tuple2, int minFieldIndex, int maxFieldIndex) {
        // The two tuples may use different schemas; translate each group-field
        // index into the corresponding position within each tuple's own schema.
        int schemaId1 = tupleMRConfig.getSchemaIdByName(tuple1.getSchema().getName());
        int schemaId2 = tupleMRConfig.getSchemaIdByName(tuple2.getSchema().getName());
        int[] translationTuple1 = serInfo.getGroupSchemaIndexTranslation(schemaId1);
        int[] translationTuple2 = serInfo.getGroupSchemaIndexTranslation(schemaId2);

        for (int i = minFieldIndex; i <= maxFieldIndex; i++) {
            Object obj1 = tuple1.get(translationTuple1[i]);
            Object obj2 = tuple2.get(translationTuple2[i]);
            @SuppressWarnings("unchecked")
            RawComparator<Object> customComparator = (RawComparator<Object>) customComparators[i];

            if (customComparator != null) {
                // TODO we assume here that customComparator must implement compare by objects.
                // Even if it's annoying, we should serialize and compare in binary.
                if (customComparator.compare(obj1, obj2) != 0) {
                    return i;
                }
            } else {
                if (SortComparator.compareObjects(obj1, obj2) != 0) {
                    return i;
                }
            }
        }
        // By contract the tuples must differ somewhere in [minFieldIndex, maxFieldIndex].
        throw new RuntimeException("Illegal state.The tuples " + tuple1 + " and " + tuple2
            + " compare the same between indexes " + minFieldIndex + " and " + maxFieldIndex);
    }
}
public class InMemoryCookieStore {

    /**
     * Collects into {@code cookies} every non-expired cookie whose index key
     * matches {@code comparator}; expired cookies are pruned from the index
     * as a side effect.
     *
     * @param cookies output list; matching cookies are appended (no duplicates)
     * @param cookieIndex index of cookies keyed by a comparable value
     * @param comparator the key to match against the index
     */
    private <T extends Comparable<T>> void getInternal2(List<HttpCookie> cookies,
                                                        Map<T, List<HttpCookie>> cookieIndex, T comparator) {
        for (Map.Entry<T, List<HttpCookie>> entry : cookieIndex.entrySet()) {
            T index = entry.getKey();
            // match by identity (also covers a null key) or by compareTo equality
            if ((index == comparator) || (index != null && comparator.compareTo(index) == 0)) {
                List<HttpCookie> indexedCookies = entry.getValue();
                if (indexedCookies != null) {
                    for (Iterator<HttpCookie> it = indexedCookies.iterator(); it.hasNext(); ) {
                        HttpCookie cookie = it.next();
                        if (cookie.hasExpired()) {
                            // prune expired cookies from the index as we go
                            it.remove();
                        } else if (!cookies.contains(cookie)) {
                            // still valid and not yet collected: add once
                            cookies.add(cookie);
                        }
                    }
                }
            }
        }
    }
}
public class ExcelExecutor {

    /**
     * Writes the given data to an Excel workbook.
     *
     * @param datas        the POJO rows to write to the workbook
     * @param hasTitle     whether a title (header) row is written
     * @param outputStream the stream to write to (this stream is NOT closed here;
     *                     the caller must close it)
     * @param inMemory     the maximum number of rows kept in memory while writing
     * @throws IOException if writing to the stream fails
     */
    public void writeToExcel(List<? extends Object> datas, boolean hasTitle, OutputStream outputStream, int inMemory) throws IOException {
        // Delegates to the 5-arg overload with the final flag fixed to false.
        // NOTE(review): the meaning of that boolean is not visible here —
        // confirm against the 5-arg overload's documentation.
        writeToExcel(datas, hasTitle, outputStream, inMemory, false);
    }
}
public class ModelGenerator { /** * Instrospects the provided class , creates a model object ( JS code ) and writes it * into the response . * @ param request the http servlet request * @ param response the http servlet response * @ param clazz class that the generator should introspect * @ param format specifies which code ( ExtJS or Touch ) the generator should create * @ param includeValidation specifies if any validation configurations should be added * to the model code * @ param debug if true the generator creates the output in pretty format , false the * output is compressed * @ throws IOException */ public static void writeModel ( HttpServletRequest request , HttpServletResponse response , Class < ? > clazz , OutputFormat format , IncludeValidation includeValidation , boolean debug ) throws IOException { } }
OutputConfig outputConfig = new OutputConfig ( ) ; outputConfig . setIncludeValidation ( includeValidation ) ; outputConfig . setOutputFormat ( format ) ; outputConfig . setDebug ( debug ) ; ModelBean model = createModel ( clazz , outputConfig ) ; writeModel ( request , response , model , outputConfig ) ;
public class ColorFactoryImpl {

    /**
     * Converts an instance of a supported data type to its string form.
     *
     * <p>EMF-generated dispatch: only HEX_COLOR is supported; any other
     * classifier is rejected. Do not hand-edit (marked {@code @generated}).</p>
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String convertToString(EDataType eDataType, Object instanceValue) {
        switch (eDataType.getClassifierID()) {
            case ColorPackage.HEX_COLOR:
                return convertHexColorToString(eDataType, instanceValue);
            default:
                // unknown classifier: fail loudly rather than return a wrong string
                throw new IllegalArgumentException("The datatype '" + eDataType.getName() + "' is not a valid classifier");
        }
    }
}
public class SocketBindingGroupAddHandler { /** * { @ inheritDoc } */ @ Override protected void populateModel ( OperationContext context , ModelNode operation , Resource resource ) throws OperationFailedException { } }
super . populateModel ( context , operation , resource ) ; // We need to store the address value in the ' name ' instead of using // ReadResourceNameOperationStepHandler to avoid picky legacy controller // model comparison failures resource . getModel ( ) . get ( NAME ) . set ( context . getCurrentAddressValue ( ) ) ; DomainModelIncludesValidator . addValidationStep ( context , operation ) ;
public class AndPermission { /** * Some privileges permanently disabled , may need to set up in the execute . * @ param fragment { @ link android . app . Fragment } . * @ param deniedPermissions one or more permissions . * @ return true , other wise is false . */ public static boolean hasAlwaysDeniedPermission ( android . app . Fragment fragment , String ... deniedPermissions ) { } }
return hasAlwaysDeniedPermission ( new FragmentSource ( fragment ) , deniedPermissions ) ;
public class AmBaseBolt { /** * Create keyhistory from original key history . < br > * Use following situation . * < ol > * < li > Not used current message key . < / li > * < li > This class ' s key history function is not executed . < / li > * < / ol > * @ param history original key history * @ return created key history */ protected KeyHistory createKeyRecorededHistory ( KeyHistory history ) { } }
KeyHistory result = null ; if ( history != null ) { // For adjust message splited , use keyhistory ' s deepcopy . result = history . createDeepCopy ( ) ; } return result ;
public class ConnectionConsumer {

    /**
     * {@inheritDoc}
     *
     * <p>Dispatches a pushed pipe message: status messages are forwarded as-is,
     * RTMP messages are re-wrapped per event type (audio, video, metadata, ping,
     * bytes-read, ...) with a fresh header carrying the event timestamp, and
     * anything else is logged and dropped.</p>
     */
    public void pushMessage(IPipe pipe, IMessage message) {
        if (message instanceof ResetMessage) {
            // ignore reset messages
        } else if (message instanceof StatusMessage) {
            StatusMessage statusMsg = (StatusMessage) message;
            data.sendStatus(statusMsg.getBody());
        } else if (message instanceof RTMPMessage) {
            // make sure chunk size has been sent
            sendChunkSize();
            // cast to rtmp message
            RTMPMessage rtmpMsg = (RTMPMessage) message;
            IRTMPEvent msg = rtmpMsg.getBody();
            // get timestamp
            int eventTime = msg.getTimestamp();
            log.debug("Message timestamp: {}", eventTime);
            if (eventTime < 0) {
                // NOTE(review): offsets a negative (wrapped) timestamp by Integer.MIN_VALUE;
                // this relies on int overflow wrap-around — confirm intended.
                eventTime += Integer.MIN_VALUE;
                msg.setTimestamp(eventTime);
                log.debug("Message has negative timestamp, applying {} offset: {}", Integer.MIN_VALUE, eventTime);
            }
            // get the data type
            byte dataType = msg.getDataType();
            if (log.isTraceEnabled()) {
                log.trace("Data type: {} source type: {}", dataType, ((BaseEvent) msg).getSourceType());
            }
            // create a new header for the consumer, based at the event time
            final Header header = new Header();
            header.setTimerBase(eventTime);
            // data buffer
            IoBuffer buf = null;
            switch (dataType) {
                case Constants.TYPE_AGGREGATE:
                    // aggregates pass straight through on the data channel
                    data.write(msg);
                    break;
                case Constants.TYPE_AUDIO_DATA:
                    buf = ((AudioData) msg).getData();
                    if (buf != null) {
                        // re-wrap in a read-only view with the new header/timestamp
                        AudioData audioData = new AudioData(buf.asReadOnlyBuffer());
                        audioData.setHeader(header);
                        audioData.setTimestamp(header.getTimer());
                        audioData.setSourceType(((AudioData) msg).getSourceType());
                        audio.write(audioData);
                    } else {
                        log.warn("Audio data was not found");
                    }
                    break;
                case Constants.TYPE_VIDEO_DATA:
                    buf = ((VideoData) msg).getData();
                    if (buf != null) {
                        // re-wrap in a read-only view with the new header/timestamp
                        VideoData videoData = new VideoData(buf.asReadOnlyBuffer());
                        videoData.setHeader(header);
                        videoData.setTimestamp(header.getTimer());
                        videoData.setSourceType(((VideoData) msg).getSourceType());
                        video.write(videoData);
                    } else {
                        log.warn("Video data was not found");
                    }
                    break;
                case Constants.TYPE_PING:
                    // pings go out over the connection itself, not a channel
                    Ping ping = (Ping) msg;
                    ping.setHeader(header);
                    conn.ping(ping);
                    break;
                case Constants.TYPE_STREAM_METADATA:
                    if (log.isTraceEnabled()) {
                        log.trace("Meta data: {}", (Notify) msg);
                    }
                    Notify notify = (Notify) msg;
                    notify.setHeader(header);
                    notify.setTimestamp(header.getTimer());
                    data.write(notify);
                    break;
                case Constants.TYPE_FLEX_STREAM_SEND:
                    // reuse the event if it is already a FlexStreamSend, otherwise wrap it
                    FlexStreamSend send = null;
                    if (msg instanceof FlexStreamSend) {
                        send = (FlexStreamSend) msg;
                    } else {
                        send = new FlexStreamSend(((Notify) msg).getData().asReadOnlyBuffer());
                    }
                    send.setHeader(header);
                    send.setTimestamp(header.getTimer());
                    data.write(send);
                    break;
                case Constants.TYPE_BYTES_READ:
                    // bytes-read acks travel on channel 2
                    BytesRead bytesRead = (BytesRead) msg;
                    bytesRead.setHeader(header);
                    bytesRead.setTimestamp(header.getTimer());
                    conn.getChannel((byte) 2).write(bytesRead);
                    break;
                default:
                    // unrecognized event types are forwarded on the data channel
                    data.write(msg);
            }
        } else {
            log.debug("Unhandled push message: {}", message);
            if (log.isTraceEnabled()) {
                Class<? extends IMessage> clazz = message.getClass();
                log.trace("Class info - name: {} declaring: {} enclosing: {}", new Object[] { clazz.getName(), clazz.getDeclaringClass(), clazz.getEnclosingClass() });
            }
        }
    }
}
public class random {

    /**
     * Create an {@code IntStream} which creates random indexes within the
     * given range and the index probability.
     *
     * @since 3.0
     * @param random the random engine used for calculating the random
     *        indexes
     * @param n the end index (exclusively). The start index is zero.
     * @param p the index selection probability
     * @return an new random index stream
     * @throws IllegalArgumentException if {@code p} is not a
     *         valid probability.
     * @throws NullPointerException if the given {@code random}
     *         engine is {@code null}.
     */
    public static IntStream indexes(final Random random, final int n, final double p) {
        // delegate to the (random, start, end, p) overload with start fixed at 0
        return indexes(random, 0, n, p);
    }
}
public class Math { /** * Returns the row maximum for a matrix . */ public static double [ ] rowMax ( double [ ] [ ] data ) { } }
double [ ] x = new double [ data . length ] ; for ( int i = 0 ; i < x . length ; i ++ ) { x [ i ] = max ( data [ i ] ) ; } return x ;
public class DBEngineVersion { /** * A list of the time zones supported by this engine for the < code > Timezone < / code > parameter of the * < code > CreateDBInstance < / code > action . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSupportedTimezones ( java . util . Collection ) } or { @ link # withSupportedTimezones ( java . util . Collection ) } if * you want to override the existing values . * @ param supportedTimezones * A list of the time zones supported by this engine for the < code > Timezone < / code > parameter of the * < code > CreateDBInstance < / code > action . * @ return Returns a reference to this object so that method calls can be chained together . */ public DBEngineVersion withSupportedTimezones ( Timezone ... supportedTimezones ) { } }
if ( this . supportedTimezones == null ) { setSupportedTimezones ( new java . util . ArrayList < Timezone > ( supportedTimezones . length ) ) ; } for ( Timezone ele : supportedTimezones ) { this . supportedTimezones . add ( ele ) ; } return this ;
public class ItemDataTraversingVisitor { /** * Visit all child properties . */ protected void visitChildProperties ( NodeData node ) throws RepositoryException { } }
if ( isInterrupted ( ) ) return ; for ( PropertyData data : dataManager . getChildPropertiesData ( node ) ) { if ( isInterrupted ( ) ) return ; data . accept ( this ) ; }
public class ISUPMessageFactoryImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . isup . ISUPMessageFactory # createLPA ( int cic ) */ public LoopbackAckMessage createLPA ( ) { } }
LoopbackAckMessage msg = new LoopbackAckMessageImpl ( _LPA_HOLDER . mandatoryCodes , _LPA_HOLDER . mandatoryVariableCodes , _LPA_HOLDER . optionalCodes , _LPA_HOLDER . mandatoryCodeToIndex , _LPA_HOLDER . mandatoryVariableCodeToIndex , _LPA_HOLDER . optionalCodeToIndex ) ; return msg ;
public class NameDbUsa { /** * Poor mans profiler * @ param task task to execute * @ param message message identifying the task * @ param tries number of times task needs to be executed */ private static void profile ( Task task , String message , int tries ) { } }
for ( int i = 0 ; i < tries ; i ++ ) { long start = System . nanoTime ( ) ; task . run ( ) ; long finish = System . nanoTime ( ) ; System . out . println ( String . format ( "[Try %d] %-30s: %-5.2fms" , i + 1 , message , ( finish - start ) / 1000000.0 ) ) ; }
public class SqlExecutor { /** * Sets parameters to the PreparedStatement . * @ param stmt the prepared statement * @ param propDescs the property descriptors * @ param entity the entity * @ throws SQLException if something goes wrong */ @ SuppressWarnings ( "unchecked" ) protected void setParameters ( PreparedStatement stmt , PropertyDesc [ ] propDescs , Object entity ) throws SQLException { } }
for ( int i = 0 ; i < propDescs . length ; i ++ ) { PropertyDesc propertyDesc = propDescs [ i ] ; if ( propertyDesc == null /* | | propertyDesc . getValue ( entity ) = = null */ ) { stmt . setObject ( i + 1 , null ) ; } else { Class < ? > propertType = propertyDesc . getPropertyType ( ) ; @ SuppressWarnings ( "rawtypes" ) ValueType valueType = MirageUtil . getValueType ( propertType , propertyDesc , dialect , valueTypes ) ; if ( valueType != null ) { valueType . set ( propertType , stmt , propertyDesc . getValue ( entity ) , i + 1 ) ; } else { if ( logger . isWarnEnabled ( ) ) { logger . warn ( "valueType for " + propertType . getName ( ) + " not found." ) ; } } } }
public class JQLBuilder { /** * < pre > * UPDATE bean01 SET text = : text WHERE id = : id * < / pre > * @ param method * the method * @ param result * the result * @ param dynamicReplace * the dynamic replace * @ param preparedJql * the prepared jql * @ return the jql */ private static JQL buildJQLUpdate ( final SQLiteModelMethod method , final JQL result , final Map < JQLDynamicStatementType , String > dynamicReplace , String preparedJql ) { } }
final Class < ? extends Annotation > annotation = BindSqlUpdate . class ; // extract some informaction from method and bean // use annotation ' s attribute value and exclude and bean definition to // define field list // final Set < String > fields = defineFields ( JQLType . UPDATE , method , // annotation , false ) ; if ( StringUtils . hasText ( preparedJql ) ) { result . value = preparedJql ; // UPDATE can contains bind parameter in column values and select // statement final One < Boolean > inWhereCondition = new One < Boolean > ( false ) ; final One < Boolean > inColumnsToUpdate = new One < Boolean > ( false ) ; JQLChecker . getInstance ( ) . analyze ( method , result , new JqlBaseListener ( ) { @ Override public void enterProjected_columns ( Projected_columnsContext ctx ) { if ( inColumnsToUpdate . value0 ) { result . containsSelectOperation = true ; } } @ Override public void enterConflict_algorithm ( Conflict_algorithmContext ctx ) { result . conflictAlgorithmType = ConflictAlgorithmType . valueOf ( ctx . getText ( ) . toUpperCase ( ) ) ; } @ Override public void enterWhere_stmt ( Where_stmtContext ctx ) { inWhereCondition . value0 = true ; } @ Override public void exitWhere_stmt_clauses ( Where_stmt_clausesContext ctx ) { inWhereCondition . value0 = false ; } @ Override public void enterBind_parameter ( Bind_parameterContext ctx ) { if ( inWhereCondition . value0 ) { result . bindParameterOnWhereStatementCounter ++ ; } else { result . bindParameterAsColumnValueCounter ++ ; } } @ Override public void enterBind_dynamic_sql ( Bind_dynamic_sqlContext ctx ) { JQLDynamicStatementType dynamicType = JQLDynamicStatementType . valueOf ( ctx . bind_parameter_name ( ) . getText ( ) . toUpperCase ( ) ) ; int start = ctx . getStart ( ) . getStartIndex ( ) - 1 ; int stop = ctx . getStop ( ) . getStopIndex ( ) + 1 ; String dynamicWhere = result . value . substring ( start , stop ) ; dynamicReplace . put ( dynamicType , dynamicWhere ) ; // super . 
enterBind _ dynamic _ sql ( ctx ) ; } @ Override public void enterColumns_to_update ( Columns_to_updateContext ctx ) { inColumnsToUpdate . value0 = true ; } @ Override public void exitColumns_to_update ( Columns_to_updateContext ctx ) { inColumnsToUpdate . value0 = false ; } } ) ; JQLChecker . getInstance ( ) . replaceVariableStatements ( method , preparedJql , new JQLReplaceVariableStatementListenerImpl ( ) { @ Override public String onWhere ( String statement ) { result . annotatedWhere = true ; result . staticWhereConditions = true ; return null ; } } ) ; if ( result . containsSelectOperation ) { AssertKripton . assertTrueOrInvalidMethodSignException ( method . getReturnClass ( ) . equals ( TypeName . VOID ) , method , "defined JQL requires that method's return type is void" ) ; } } else { final SQLiteDaoDefinition dao = method . getParent ( ) ; Set < String > fields ; ModifyType modifyType = SqlModifyBuilder . detectModifyType ( method , JQLType . UPDATE ) ; if ( modifyType == ModifyType . UPDATE_BEAN ) { fields = extractFieldsFromAnnotation ( method , annotation , false ) ; } else { fields = extractFieldsFromMethodParameters ( method , annotation ) ; } AssertKripton . assertTrueOrInvalidMethodSignException ( fields . size ( ) > 0 , method , "no field was specified for update" ) ; result . conflictAlgorithmType = ConflictAlgorithmType . valueOf ( AnnotationUtility . extractAsEnumerationValue ( method . getElement ( ) , annotation , AnnotationAttributeType . CONFLICT_ALGORITHM_TYPE ) ) ; StringBuilder builder = new StringBuilder ( ) ; builder . append ( UPDATE_KEYWORD ) ; builder . append ( " " + result . conflictAlgorithmType . getSqlForInsert ( ) ) ; // entity name builder . append ( dao . getEntitySimplyClassName ( ) ) ; // recreate fields final One < String > prefix = new One < > ( "" ) ; if ( result . hasParamBean ( ) ) { prefix . value0 = result . paramBean + "." ; } builder . append ( " " + SET_KEYWORD + " " ) ; builder . 
append ( forEachFields ( fields , new OnFieldListener ( ) { @ Override public String onField ( String item ) { return item + "=" + SqlAnalyzer . PARAM_PREFIX + prefix . value0 + item + SqlAnalyzer . PARAM_SUFFIX ; } } ) ) ; builder . append ( defineWhereStatement ( method , result , annotation , dynamicReplace ) ) ; result . value = builder . toString ( ) ; } result . operationType = JQLType . UPDATE ; result . dynamicReplace = dynamicReplace ; return result ;
public class MapDataHistoryDao { /** * Feeds all versions of the given relation to the handler . The elements are sorted by version , * the oldest version is the first , the newest version is the last element . < br > * If not logged in , the Changeset for each returned element will be null * @ throws OsmNotFoundException if the node has not been found . */ public void getRelationHistory ( long id , Handler < Relation > handler ) { } }
MapDataHandler mapDataHandler = new WrapperOsmElementHandler < > ( Relation . class , handler ) ; boolean authenticate = osm . getOAuth ( ) != null ; osm . makeRequest ( RELATION + "/" + id + "/" + HISTORY , authenticate , new MapDataParser ( mapDataHandler , factory ) ) ;
public class StringExpression { /** * Create a { @ code this . contains ( str ) } expression * < p > Returns true if the given String is contained < / p > * @ param str string * @ return this . contains ( str ) * @ see java . lang . String # contains ( CharSequence ) */ public BooleanExpression contains ( Expression < String > str ) { } }
return Expressions . booleanOperation ( Ops . STRING_CONTAINS , mixin , str ) ;
public class Parser { /** * This method reads in one zip file . Before reading the file , it will check if the current file has the same * number of columns and separator type as the previous files it has parssed . If they do not match , no file will * be parsed in this case . * @ param is * @ param dout * @ param bvs * @ param nextChunk * @ param zidx * @ return * @ throws IOException */ private StreamInfo readOneFile ( final InputStream is , final StreamParseWriter dout , InputStream bvs , StreamParseWriter nextChunk , int zidx , int fileIndex ) throws IOException { } }
int cidx = 0 ; StreamData din = new StreamData ( is ) ; // only check header for 2nd file onward since guess setup is already done on first file . if ( ( fileIndex > 0 ) && ( ! checkFileNHeader ( is , dout , din , cidx ) ) ) // cidx should be the actual column index return new StreamInfo ( zidx , nextChunk ) ; // header is bad , quit now int streamAvailable = is . available ( ) ; while ( streamAvailable > 0 ) { parseChunk ( cidx ++ , din , nextChunk ) ; // cidx here actually goes and get the right column chunk . streamAvailable = is . available ( ) ; // Can ( also ! ) rollover to the next input chunk int xidx = bvs . read ( null , 0 , 0 ) ; // Back - channel read of chunk index if ( xidx > zidx ) { // Advanced chunk index of underlying ByteVec stream ? zidx = xidx ; // Record advancing of chunk nextChunk . close ( ) ; // Match output chunks to input zipfile chunks if ( dout != nextChunk ) { dout . reduce ( nextChunk ) ; if ( _jobKey != null && _jobKey . get ( ) . stop_requested ( ) ) break ; } nextChunk = nextChunk . nextChunk ( ) ; } } parseChunk ( cidx , din , nextChunk ) ; return new StreamInfo ( zidx , nextChunk ) ;
public class OpenAPIConnection { /** * creates default connection for OpenAPI UI endpoint * @ param server - server to connect to * @ param secure - if true connection uses HTTPS * @ return */ public static OpenAPIConnection openAPIUIConnection ( LibertyServer server , boolean secure ) { } }
return new OpenAPIConnection ( server , OPEN_API_UI ) . secure ( secure ) ;
public class DaoManager { /** * Helper method to lookup a DAO if it has already been associated with the class . Otherwise this returns null . */ public synchronized static < D extends Dao < T , ? > , T > D lookupDao ( ConnectionSource connectionSource , Class < T > clazz ) { } }
if ( connectionSource == null ) { throw new IllegalArgumentException ( "connectionSource argument cannot be null" ) ; } ClassConnectionSource key = new ClassConnectionSource ( connectionSource , clazz ) ; Dao < ? , ? > dao = lookupDao ( key ) ; @ SuppressWarnings ( "unchecked" ) D castDao = ( D ) dao ; return castDao ;
public class AddOnRunIssuesUtils { /** * Returns the textual representation of the issues that prevent the extensions of the add - on from being run , if any . * The messages are not internationalised , should be used only for logging and non UI uses . * @ param requirements the run requirements of the add - on whose extensions ' run requirements will be used * @ return a { @ code String } representing the running issue , { @ code null } if none . * @ see AddOn . AddOnRunRequirements # getExtensionRequirements ( ) */ public static List < String > getExtensionsRunningIssues ( AddOn . AddOnRunRequirements requirements ) { } }
if ( ! requirements . hasExtensionsWithRunningIssues ( ) ) { return new ArrayList < > ( 0 ) ; } List < String > issues = new ArrayList < > ( 10 ) ; for ( AddOn . ExtensionRunRequirements extReqs : requirements . getExtensionRequirements ( ) ) { issues . addAll ( getRunningIssues ( extReqs ) ) ; } return issues ;
public class PackratParser { /** * This method tries to apply a production at a given position . The production * is given as a name and not as a concrete rule to process all choices * afterwards . * @ param rule * @ param position * @ return * @ throws TreeException * @ throws ParserException */ private MemoEntry applyRule ( String rule , int position , int line ) throws TreeException , ParserException { } }
printMessage ( "applyRule: " + rule , position , line ) ; MemoEntry m = recall ( rule , position , line ) ; if ( m == null ) { /* * " Create a new LR and push it onto the rule invocation stack . " * At this point we found a rule which was never processed at this position . We * start completely virgin here . . . */ LR lr = new LR ( MemoEntry . failed ( ) , rule , null ) ; ruleInvocationStack = new RuleInvocation ( MemoEntry . failed ( ) , rule , null , ruleInvocationStack ) ; /* * " Memoize lr , then evaluate R . " * Put a fail into memoization memory and evaluate the rule afterwards . */ m = MemoEntry . create ( lr ) ; memo . setMemo ( rule , position , line , m ) ; final MemoEntry ans = eval ( rule , position , line ) ; /* * " Pop lr off the rule invocation stack . " * The evaluation of this lr is finished now and we can remove it from stack . * This was needed in cases a left recursion whould be found within the rule . */ ruleInvocationStack = ruleInvocationStack . getNext ( ) ; if ( ( m . getAnswer ( ) instanceof LR ) && ( ( ( LR ) m . getAnswer ( ) ) . getHead ( ) != null ) ) { /* * If a head was added to lr , we found a recursion during evaluation . We need to * set the seed and process with left recursion evaluation . For that purpose we * grow m with ans as seed . */ lr = ( LR ) m . getAnswer ( ) ; lr . setSeed ( ans ) ; MemoEntry lrAnswer = lrAnswer ( rule , position , line , m ) ; printMessage ( "grow LR for '" + rule + "' (" + lrAnswer + ")." , position , line ) ; return lrAnswer ; } else { /* * We finished an evaluation and did not find a recursion . So the result * ( independent of the the state ) is stored in memo and returned . */ m . set ( ans ) ; printMessage ( "applied '" + rule + "' (" + ans . getAnswer ( ) + ")." , position , line ) ; return ans ; } } else { /* * We were here already and with the same production . We either have a real * answer or we found a recursion with or without currently seed growing . . . */ if ( ( m . 
getAnswer ( ) instanceof LR ) ) { /* * There is still a LR object in the memo , so we found a recursion or an * in - progress seed grow . We setup the LR seed grow and return the current seed . */ setupLR ( rule , ( LR ) m . getAnswer ( ) ) ; MemoEntry seed = ( ( LR ) m . getAnswer ( ) ) . getSeed ( ) ; printMessage ( "Found recursion or grow in process for '" + rule + "' (" + seed + ")." , position , line ) ; return seed ; } else { /* * We were already here and we have a real result . So we can just return the * answer . */ printMessage ( "already processed '" + rule + "' (" + m + ")." , position , line ) ; return m ; } }
public class PutIntegrationResponseRequest { /** * A key - value map specifying response parameters that are passed to the method response from the back end . The key * is a method response header parameter name and the mapped value is an integration response header value , a static * value enclosed within a pair of single quotes , or a JSON expression from the integration response body . The * mapping key must match the pattern of < code > method . response . header . { name } < / code > , where < code > name < / code > is a * valid and unique header name . The mapped non - static value must match the pattern of * < code > integration . response . header . { name } < / code > or < code > integration . response . body . { JSON - expression } < / code > , * where < code > name < / code > must be a valid and unique response header name and < code > JSON - expression < / code > a valid * JSON expression without the < code > $ < / code > prefix . * @ param responseParameters * A key - value map specifying response parameters that are passed to the method response from the back end . * The key is a method response header parameter name and the mapped value is an integration response header * value , a static value enclosed within a pair of single quotes , or a JSON expression from the integration * response body . The mapping key must match the pattern of < code > method . response . header . { name } < / code > , where * < code > name < / code > is a valid and unique header name . The mapped non - static value must match the pattern of * < code > integration . response . header . { name } < / code > or * < code > integration . response . body . { JSON - expression } < / code > , where < code > name < / code > must be a valid and * unique response header name and < code > JSON - expression < / code > a valid JSON expression without the * < code > $ < / code > prefix . * @ return Returns a reference to this object so that method calls can be chained together . 
*/ public PutIntegrationResponseRequest withResponseParameters ( java . util . Map < String , String > responseParameters ) { } }
setResponseParameters ( responseParameters ) ; return this ;
public class Tinylog1Benchmark { /** * Benchmarks issuing log entries that will be output . * @ param lifeCycle * Can be ignored */ @ Benchmark @ BenchmarkMode ( Mode . Throughput ) public void output ( final LifeCycle lifeCycle ) { } }
Logger . info ( "Hello {}!" , MAGIC_NUMBER ) ;
public class StructureName { /** * Tries to determine the source and pdbId without fully realizing the identifier , * which could require I / O depending on the source * @ throws IllegalArgumentException if the source is recognizable but invalid */ private void init ( ) { } }
// First try identifying a prefix String [ ] prefix = name . split ( ":" , 2 ) ; mySource = null ; if ( prefix . length > 1 ) { // Match Source prefixes String suffix = prefix [ 1 ] ; try { mySource = Source . valueOf ( prefix [ 0 ] . toUpperCase ( ) ) ; } catch ( IllegalArgumentException e ) { // unrecognized prefix ; fall back on guessing mySource = null ; } if ( mySource != null ) { switch ( mySource ) { case SCOP : if ( ! initFromScop ( suffix ) ) throw new IllegalArgumentException ( "Malformed SCOP domain name:" + suffix ) ; return ; case PDP : if ( ! initFromPDP ( name ) ) throw new IllegalArgumentException ( "Malformed PDP domain name:" + suffix ) ; return ; case CATH : if ( ! initFromCATH ( suffix ) ) throw new IllegalArgumentException ( "Malformed CATH domain name:" + suffix ) ; return ; case BIO : if ( ! initFromBIO ( name ) ) throw new IllegalArgumentException ( "Malformed BIO name:" + suffix ) ; return ; case ECOD : if ( ! initFromECOD ( suffix ) ) throw new IllegalArgumentException ( "Malformed ECOD domain name:" + suffix ) ; return ; case PDB : if ( ! initFromPDB ( suffix ) ) throw new IllegalArgumentException ( "Malformed PDB specification:" + suffix ) ; return ; case FILE : // Treat file : / prefixes as URLs if ( ! suffix . startsWith ( "/" ) ) { // Otherwise , treat as file initFromFile ( ) ; return ; } // fall through to URL case case URL : if ( ! initFromURL ( name ) ) throw new IllegalArgumentException ( "Malformed URL specification:" + suffix ) ; return ; default : throw new IllegalStateException ( "Unimplemented Source " + mySource ) ; } } } // No known prefix , so revert to guessing // First guess regex - based identifiers // SCOP domain if ( initFromScop ( name ) ) return ; // CATH if ( initFromCATH ( name ) ) return ; // ECOD if ( initFromECOD ( name ) ) return ; // Never guess BIO or PDP // URL if ( initFromURL ( name ) ) return ; // Guess FILE based on file existence File file = new File ( FileDownloadUtils . 
expandUserHome ( name ) ) ; if ( file . canRead ( ) && ! file . isDirectory ( ) ) { // an attempt to mitigate issue # 398 . It doesn ' t fix it but it catches the most common case of passing a pdb id and finding a file in working dir matching it if ( name . matches ( "\\d\\w\\w\\w" ) ) { // the plain pdb id case , this is unlikely to be what the user wants : let ' s let it through but warn about it logger . warn ( "Provided 4-letter structure name '{}' matches " + "file name in directory {}. Will read structure " + "data from file {} and not consider the name as a " + "structure identifier. If this is not what you " + "want, use 'FILE:{}'" , name , file . getAbsoluteFile ( ) . getParent ( ) , file . getAbsolutePath ( ) , name ) ; } else { logger . info ( "Provided structure name '{}' matches " + "file name in directory {}. Will read structure " + "data from file {}." , name , file . getAbsoluteFile ( ) . getParent ( ) , file . getAbsolutePath ( ) ) ; } initFromFile ( ) ; return ; } // Default to PDB initFromPDB ( name ) ;
public class GeoPackageCoreImpl { /** * { @ inheritDoc } */ @ Override public List < String > getTables ( String type ) { } }
ContentsDao contentDao = getContentsDao ( ) ; List < String > tableNames ; try { tableNames = contentDao . getTables ( type ) ; } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to retrieve " + type + " tables" , e ) ; } return tableNames ;
public class TemplateParserContext { /** * Return the fully qualified name for a given method . Only works if the method has been * statically imported . * @ param methodName The name of the method to get the fully qualified name of * @ return The fully qualified name , or the method name if it ' s unknown */ public String getFullyQualifiedNameForMethodName ( String methodName ) { } }
if ( ! methodNameToFullyQualifiedName . containsKey ( methodName ) ) { return methodName ; } return methodNameToFullyQualifiedName . get ( methodName ) ;
public class VideoRenderer { /** * Inflate the main layout used to render videos in the list view . * @ param inflater LayoutInflater service to inflate . * @ param parent ViewGroup used to inflate xml . * @ return view inflated . */ @ Override protected View inflate ( LayoutInflater inflater , ViewGroup parent ) { } }
View inflatedView = inflater . inflate ( R . layout . video_renderer , parent , false ) ; /* * You don ' t have to use ButterKnife library to implement the mapping between your layout * and your widgets you can implement setUpView and hookListener methods declared in * Renderer < T > class . */ ButterKnife . bind ( this , inflatedView ) ; return inflatedView ;
public class RubyEnumerator { /** * { @ inheritDoc } * @ return this { @ link RubyEnumerator } */ @ Override public RubyEnumerator < E > each ( Consumer < ? super E > block ) { } }
iter . forEach ( block ) ; return this ;
public class ContextServiceImpl { /** * Called by Declarative Services to modify service config properties * @ param context DeclarativeService defined / populated component context */ @ Trivial @ Modified protected void modified ( ComponentContext context ) { } }
Dictionary < String , ? > props = context . getProperties ( ) ; final boolean trace = TraceComponent . isAnyTracingEnabled ( ) ; if ( trace && tc . isEntryEnabled ( ) ) Tr . entry ( this , tc , "modified" , props ) ; String contextSvcName = ( String ) props . get ( JNDI_NAME ) ; if ( contextSvcName == null ) contextSvcName = ( String ) props . get ( CONFIG_ID ) ; ContextServiceImpl [ ] listeners ; lock . writeLock ( ) . lock ( ) ; try { listeners = modificationListeners . toArray ( new ContextServiceImpl [ modificationListeners . size ( ) ] ) ; modificationListeners . clear ( ) ; componentContext = context ; properties = props ; name = contextSvcName ; threadContextConfigurations = null ; } finally { lock . writeLock ( ) . unlock ( ) ; } for ( ContextServiceImpl listener : listeners ) listener . baseInstanceModified ( ) ; if ( trace && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "modified" ) ;
public class ArrayMath { /** * Returns the Jensen Shannon divergence ( information radius ) between * a and b , defined as the average of the kl divergences from a to b * and from b to a . */ public static double jensenShannonDivergence ( double [ ] a , double [ ] b ) { } }
double [ ] average = pairwiseAdd ( a , b ) ; multiplyInPlace ( average , .5 ) ; return .5 * klDivergence ( a , average ) + .5 * klDivergence ( b , average ) ;
public class IotHubResourcesInner { /** * Get a list of the consumer groups in the Event Hub - compatible device - to - cloud endpoint in an IoT hub . * Get a list of the consumer groups in the Event Hub - compatible device - to - cloud endpoint in an IoT hub . * ServiceResponse < PageImpl < EventHubConsumerGroupInfoInner > > * @ param resourceGroupName The name of the resource group that contains the IoT hub . * ServiceResponse < PageImpl < EventHubConsumerGroupInfoInner > > * @ param resourceName The name of the IoT hub . * ServiceResponse < PageImpl < EventHubConsumerGroupInfoInner > > * @ param eventHubEndpointName The name of the Event Hub - compatible endpoint . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the PagedList & lt ; EventHubConsumerGroupInfoInner & gt ; object wrapped in { @ link ServiceResponse } if successful . */ public Observable < ServiceResponse < Page < EventHubConsumerGroupInfoInner > > > listEventHubConsumerGroupsSinglePageAsync ( final String resourceGroupName , final String resourceName , final String eventHubEndpointName ) { } }
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( resourceName == null ) { throw new IllegalArgumentException ( "Parameter resourceName is required and cannot be null." ) ; } if ( eventHubEndpointName == null ) { throw new IllegalArgumentException ( "Parameter eventHubEndpointName is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } return service . listEventHubConsumerGroups ( this . client . subscriptionId ( ) , resourceGroupName , resourceName , eventHubEndpointName , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Page < EventHubConsumerGroupInfoInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < EventHubConsumerGroupInfoInner > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < PageImpl < EventHubConsumerGroupInfoInner > > result = listEventHubConsumerGroupsDelegate ( response ) ; return Observable . just ( new ServiceResponse < Page < EventHubConsumerGroupInfoInner > > ( result . body ( ) , result . response ( ) ) ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class ComparatorCompat { /** * Returns a comparator that considers { @ code null } to be greater than non - null . * If the specified comparator is { @ code null } , then the returned * comparator considers all non - null values to be equal . * @ param < T > the type of the objects compared by the comparator * @ param comparator a comparator for comparing non - null values * @ return a comparator */ @ NotNull public static < T > ComparatorCompat < T > nullsLast ( @ Nullable Comparator < ? super T > comparator ) { } }
return nullsComparator ( false , comparator ) ;
public class Checks { /** * Performs a runtime check if the reference is an instance of the provided class * @ param class _ the class to use * @ param reference reference to check * @ param errorMessage the exception message to use if the check fails ; will * be converted to a string using { @ link String # valueOf ( Object ) } * @ param < T > the reference type * @ see Checks # checkIsInstance ( Class , Object , String , Object . . . ) */ @ Beta public static < T > T checkIsInstance ( Class < T > class_ , Object reference , @ Nullable String errorMessage ) { } }
return checkIsInstance ( class_ , reference , errorMessage , EMPTY_ERROR_MESSAGE_ARGS ) ;
public class ValidationRuleStore { /** * Gets validation checker . * @ param rule the rule * @ return the validation checker */ public BaseValidationCheck getValidationChecker ( ValidationRule rule ) { } }
ValidationRule existRule = this . rules . stream ( ) . filter ( r -> r . getRuleName ( ) . equals ( rule . getRuleName ( ) ) ) . findFirst ( ) . orElse ( null ) ; if ( existRule == null ) { throw new ValidationLibException ( "rulename : " + rule . getRuleName ( ) + "checker is notfound " , HttpStatus . INTERNAL_SERVER_ERROR ) ; } return existRule . getValidationCheck ( ) ;
public class Pkg { /** * From a given map ( package name - > version ) , create a list of maps with just one * element each . This is how Salt requires us to send the ' pkgs ' argument when multiple * packages should be installed or removed . * @ param pkgs map with packages ( name - > version ) * @ return list of maps with one element each */ private static List < Map < String , String > > preparePkgs ( Map < String , String > pkgs ) { } }
return pkgs . entrySet ( ) . stream ( ) . map ( e -> Collections . unmodifiableMap ( Stream . of ( e ) . collect ( Collectors . < Map . Entry < String , String > , String , String > toMap ( Map . Entry :: getKey , Map . Entry :: getValue ) ) ) ) . collect ( Collectors . toList ( ) ) ;
public class HttpClientUtil { /** * Quitely shuts down an HttpClient instance by shutting down its connection * manager and ignoring any errors that occur . * @ param http The HttpClient to shutdown */ static public void shutdownQuietly ( HttpClient http ) { } }
if ( http != null ) { try { http . getConnectionManager ( ) . shutdown ( ) ; } catch ( Exception ignore ) { // do nothing } }
public class PatchSourceMarshaller { /** * Marshall the given parameter object . */ public void marshall ( PatchSource patchSource , ProtocolMarshaller protocolMarshaller ) { } }
if ( patchSource == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( patchSource . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( patchSource . getProducts ( ) , PRODUCTS_BINDING ) ; protocolMarshaller . marshall ( patchSource . getConfiguration ( ) , CONFIGURATION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CommerceShipmentLocalServiceUtil { /** * Returns the commerce shipment with the primary key . * @ param commerceShipmentId the primary key of the commerce shipment * @ return the commerce shipment * @ throws PortalException if a commerce shipment with the primary key could not be found */ public static com . liferay . commerce . model . CommerceShipment getCommerceShipment ( long commerceShipmentId ) throws com . liferay . portal . kernel . exception . PortalException { } }
return getService ( ) . getCommerceShipment ( commerceShipmentId ) ;
public class ProjectApi { /** * Get an Optional instance with the value for the specific project , which is owned by the authentication user . * < pre > < code > GET / projects / : id < / code > < / pre > * @ param namespace the name of the project namespace or group * @ param project the name of the project * @ return an Optional instance with the specified project as a value */ public Optional < Project > getOptionalProject ( String namespace , String project ) { } }
try { return ( Optional . ofNullable ( getProject ( namespace , project ) ) ) ; } catch ( GitLabApiException glae ) { return ( GitLabApi . createOptionalFromException ( glae ) ) ; }
public class JobReceiver { /** * 添加任务 */ private void addJob ( Job job , JobPo jobPo ) throws DupEntryException { } }
if ( job . isCron ( ) ) { addCronJob ( jobPo ) ; } else if ( job . isRepeatable ( ) ) { addRepeatJob ( jobPo ) ; } else { addTriggerTimeJob ( jobPo ) ; }
public class XAbstractFeatureCallImpl {
    /**
     * Sets the implicit first argument, chaining any resulting change notification
     * onto {@code msgs} and returning the (possibly newly created) chain.
     * EMF-generated "basicSet" method — do not hand-edit the logic.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetImplicitFirstArgument(XExpression newImplicitFirstArgument, NotificationChain msgs) {
        // Swap in the new value, remembering the old one for the notification payload.
        XExpression oldImplicitFirstArgument = implicitFirstArgument;
        implicitFirstArgument = newImplicitFirstArgument;
        if (eNotificationRequired()) {
            // Queue a SET event; start a new chain if the caller didn't pass one.
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, XbasePackage.XABSTRACT_FEATURE_CALL__IMPLICIT_FIRST_ARGUMENT, oldImplicitFirstArgument, newImplicitFirstArgument);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }
}
public class SimpleDateFormat {
    /**
     * Formats a date or time, which is the standard millis
     * since January 1, 1970, 00:00:00 GMT.
     * <p>Example: using the US locale:
     * "yyyy.MM.dd G 'at' HH:mm:ss zzz" -&gt; 1996.07.10 AD at 15:08:56 PDT
     *
     * <p>NOTE(review): this method mutates the instance's internal {@code calendar}
     * (time, and temporarily its time zone), so it is not safe for concurrent use.
     *
     * @param cal the calendar whose date-time value is to be formatted into a date-time string
     * @param toAppendTo where the new date-time text is to be appended
     * @param pos the formatting position. On input: an alignment field,
     *            if desired. On output: the offsets of the alignment field.
     * @return the formatted date-time string.
     * @see DateFormat
     */
    @Override
    public StringBuffer format(Calendar cal, StringBuffer toAppendTo, FieldPosition pos) {
        TimeZone backupTZ = null;
        if (cal != calendar && !cal.getType().equals(calendar.getType())) {
            // Different calendar type:
            // We use the time and time zone from the input calendar, but
            // do not use the input calendar for field calculation — the
            // internal calendar (matching this format's type) does that.
            calendar.setTimeInMillis(cal.getTimeInMillis());
            backupTZ = calendar.getTimeZone();
            calendar.setTimeZone(cal.getTimeZone());
            cal = calendar;
        }
        StringBuffer result = format(cal, getContext(DisplayContext.Type.CAPITALIZATION), toAppendTo, pos, null);
        if (backupTZ != null) {
            // Restore the original time zone on the internal calendar.
            calendar.setTimeZone(backupTZ);
        }
        return result;
    }
}