signature (string, lengths 43–39.1k) | implementation (string, lengths 0–450k) |
|---|---|
public class BaseApplet { /** * Throw up a dialog box to show " about " info .
* @ return true . */
public boolean onAbout ( ) { } } | Application application = this . getApplication ( ) ; application . getResources ( null , true ) ; // Set the resource bundle to default
String strTitle = this . getString ( ThinMenuConstants . ABOUT ) ; String strMessage = this . getString ( "Copyright" ) ; JOptionPane . showMessageDialog ( ScreenUtil . getFrame ( this ) , strMessage , strTitle , JOptionPane . INFORMATION_MESSAGE ) ; return true ; |
public class CmsGalleryController {
    /**
     * Sets the search scope in the search object.<p>
     *
     * @param scope the search scope
     */
    public void addScope(CmsGallerySearchScope scope) {
        m_searchObject.setScope(scope);
        // Mark the criteria dirty so the next search run picks up the new scope.
        m_searchObjectChanged = true;
        // Notify registered value-change handlers about the modified search object.
        ValueChangeEvent.fire(this, m_searchObject);
    }
}
public class MyStreamUtils { /** * Gets string content from InputStream
* @ param is InputStream to read
* @ return InputStream content */
public static String readContent ( InputStream is ) { } } | String ret = "" ; try { String line ; BufferedReader in = new BufferedReader ( new InputStreamReader ( is ) ) ; StringBuffer out = new StringBuffer ( ) ; while ( ( line = in . readLine ( ) ) != null ) { out . append ( line ) . append ( CARRIAGE_RETURN ) ; } ret = out . toString ( ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } return ret ; |
public class ConsumerDispatcher {
    /**
     * Returns whether this consumerDispatcher is being used for a durable subscription.
     *
     * @return false if not pubsub or not durable.
     */
    @Override
    public boolean isDurable() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "isDurable");
        // Durability only applies to pub/sub dispatchers; point-to-point is never durable.
        final boolean dur = (_isPubSub && dispatcherState.isDurable());
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "isDurable", Boolean.valueOf(dur));
        return dur;
    }
}
public class FeatureTileTableCoreLinker { /** * Delete the feature tile table link
* @ param featureTable
* feature table
* @ param tileTable
* tile table
* @ return true if deleted */
public boolean deleteLink ( String featureTable , String tileTable ) { } } | boolean deleted = false ; try { if ( featureTileLinkDao . isTableExists ( ) ) { FeatureTileLinkKey id = new FeatureTileLinkKey ( featureTable , tileTable ) ; deleted = featureTileLinkDao . deleteById ( id ) > 0 ; } } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to delete feature tile link for GeoPackage: " + geoPackage . getName ( ) + ", Feature Table: " + featureTable + ", Tile Table: " + tileTable , e ) ; } return deleted ; |
public class Math {
    /**
     * Returns the greater of two {@code float} values. That is,
     * the result is the argument closer to positive infinity. If the
     * arguments have the same value, the result is that same value.
     * If either value is NaN, then the result is NaN. Unlike the
     * numerical comparison operators, this method considers negative
     * zero to be strictly smaller than positive zero. If one argument
     * is positive zero and the other negative zero, the result is
     * positive zero.
     *
     * @param a an argument.
     * @param b another argument.
     * @return the larger of {@code a} and {@code b}.
     */
    public static float max(float a, float b) {
        if (a != a) {
            return a; // a is NaN; propagate it (b NaN falls out of the ternary below)
        }
        // -0.0f vs +0.0f are numerically equal, yet max must prefer +0.0f.
        // Raw-bit comparison is safe: NaN was handled above and cannot map to -0.0f.
        // Computing the -0.0f bit pattern inline removes the dependency on an
        // external private constant, with identical semantics.
        if ((a == 0.0f) && (b == 0.0f)
                && (Float.floatToRawIntBits(a) == Float.floatToRawIntBits(-0.0f))) {
            return b;
        }
        return (a >= b) ? a : b;
    }
}
public class LinearClassifier { /** * Returns string representation of a list of top features
* @ param topFeatures List of triples indicating feature , label , weight
* @ return String representation of the list of features */
public String topFeaturesToString ( List < Triple < F , L , Double > > topFeatures ) { } } | // find longest key length ( for pretty printing ) with a limit
int maxLeng = 0 ; for ( Triple < F , L , Double > t : topFeatures ) { String key = "(" + t . first + "," + t . second + ")" ; int leng = key . length ( ) ; if ( leng > maxLeng ) { maxLeng = leng ; } } maxLeng = Math . min ( 64 , maxLeng ) ; // set up pretty printing of weights
NumberFormat nf = NumberFormat . getNumberInstance ( ) ; nf . setMinimumFractionDigits ( 4 ) ; nf . setMaximumFractionDigits ( 4 ) ; if ( nf instanceof DecimalFormat ) { ( ( DecimalFormat ) nf ) . setPositivePrefix ( " " ) ; } // print high weight features to a String
StringBuilder sb = new StringBuilder ( ) ; for ( Triple < F , L , Double > t : topFeatures ) { String key = "(" + t . first + "," + t . second + ")" ; sb . append ( StringUtils . pad ( key , maxLeng ) ) ; sb . append ( " " ) ; double cnt = t . third ( ) ; if ( Double . isInfinite ( cnt ) ) { sb . append ( cnt ) ; } else { sb . append ( nf . format ( cnt ) ) ; } sb . append ( "\n" ) ; } return sb . toString ( ) ; |
public class Call { /** * Abbreviation for { { @ link # methodForSetOfString ( String , Object . . . ) } .
* @ since 1.1
* @ param methodName the name of the method
* @ param optionalParameters the ( optional ) parameters of the method .
* @ return the result of the method execution */
public static Function < Object , Set < String > > setOfString ( final String methodName , final Object ... optionalParameters ) { } } | return methodForSetOfString ( methodName , optionalParameters ) ; |
public class SheetRenderer {
    /**
     * Encode the row data. Builds row data, style data and read only object.
     *
     * @param context the current faces context
     * @param sheet the sheet component being rendered
     * @param wb the widget builder receiving the encoded native attributes
     * @throws IOException if writing to the response fails
     */
    protected void encodeData(final FacesContext context, final Sheet sheet, final WidgetBuilder wb) throws IOException {
        // One JS variable builder per client-side structure the widget consumes.
        final JavascriptVarBuilder jsData = new JavascriptVarBuilder(null, false);
        final JavascriptVarBuilder jsRowKeys = new JavascriptVarBuilder(null, false);
        final JavascriptVarBuilder jsStyle = new JavascriptVarBuilder(null, true);
        final JavascriptVarBuilder jsRowStyle = new JavascriptVarBuilder(null, false);
        final JavascriptVarBuilder jsReadOnly = new JavascriptVarBuilder(null, true);
        final JavascriptVarBuilder jsRowHeaders = new JavascriptVarBuilder(null, false);
        final boolean isCustomHeader = sheet.getRowHeaderValueExpression() != null;
        final List<Object> values = sheet.getSortedValues();
        int row = 0;
        for (final Object value : values) {
            // Expose the current row object under the sheet's var name so EL
            // expressions inside the row resolve against it.
            context.getExternalContext().getRequestMap().put(sheet.getVar(), value);
            final String rowKey = sheet.getRowKeyValueAsString(context);
            jsRowKeys.appendArrayValue(rowKey, true);
            encodeRow(context, rowKey, jsData, jsRowStyle, jsStyle, jsReadOnly, sheet, row);
            // In case of custom row header evaluate the value expression for every row to
            // set the header
            if (sheet.isShowRowHeaders() && isCustomHeader) {
                final String rowHeader = sheet.getRowHeaderValueAsString(context);
                jsRowHeaders.appendArrayValue(rowHeader, true);
            }
            row++;
        }
        // Clear the row var so it does not leak past the render phase.
        sheet.setRowVar(context, null);
        wb.nativeAttr("data", jsData.closeVar().toString());
        wb.nativeAttr("styles", jsStyle.closeVar().toString());
        wb.nativeAttr("rowStyles", jsRowStyle.closeVar().toString());
        wb.nativeAttr("readOnlyCells", jsReadOnly.closeVar().toString());
        wb.nativeAttr("rowKeys", jsRowKeys.closeVar().toString());
        // add the row header as a native attribute
        if (!isCustomHeader) {
            wb.nativeAttr("rowHeaders", sheet.isShowRowHeaders().toString());
        } else {
            wb.nativeAttr("rowHeaders", jsRowHeaders.closeVar().toString());
        }
    }
}
public class AbstractSearch { /** * Gets an enumeration describing the available options .
* @ return an enumeration of all the available options . */
@ Override public Enumeration listOptions ( ) { } } | Vector result ; result = new Vector ( ) ; result . addElement ( new Option ( "\tWhether to enable debugging output.\n" + "\t(default off)" , "D" , 0 , "-D" ) ) ; return result . elements ( ) ; |
public class PrimitiveArrayDump {
    // ~ Methods ------------------------------------------------------------

    /**
     * Returns the heap size in bytes of this primitive array instance:
     * the minimum instance size, plus the HPROF array header overhead,
     * plus one element slot per entry.
     *
     * @return total size of the array instance in bytes
     */
    public long getSize() {
        // Element size depends on the primitive type recorded in the dump.
        long elementSize = dumpClass.getHprof().getValueSize(getType());
        return dumpClass.classDumpSegment.getMinimumInstanceSize()
                + HPROF_ARRAY_OVERHEAD + (elementSize * getLength());
    }
}
public class Parser {
    /**
     * Parses a {@code let} statement or {@code let} expression.
     * Have to pass in 'let' kwd position to compute kid offsets properly.
     *
     * @param isStatement true when parsing in statement context
     * @param pos absolute source position of the 'let' keyword
     * @return the parsed {@code LetNode}, or an {@code ExpressionStatement}
     *         wrapper when a let expression appears in statement context
     * @throws IOException if the token stream fails
     */
    private AstNode let(boolean isStatement, int pos) throws IOException {
        LetNode pn = new LetNode(pos);
        pn.setLineno(ts.lineno);
        // Child offsets below are all relative to pos (the 'let' keyword).
        if (mustMatchToken(Token.LP, "msg.no.paren.after.let", true))
            pn.setLp(ts.tokenBeg - pos);
        pushScope(pn);
        try {
            VariableDeclaration vars = variables(Token.LET, ts.tokenBeg, isStatement);
            pn.setVariables(vars);
            if (mustMatchToken(Token.RP, "msg.no.paren.let", true)) {
                pn.setRp(ts.tokenBeg - pos);
            }
            if (isStatement && peekToken() == Token.LC) {
                // let statement
                consumeToken();
                int beg = ts.tokenBeg; // position stmt at LC
                AstNode stmt = statements();
                mustMatchToken(Token.RC, "msg.no.curly.let", true);
                stmt.setLength(ts.tokenEnd - beg);
                pn.setLength(ts.tokenEnd - pos);
                pn.setBody(stmt);
                pn.setType(Token.LET);
            } else {
                // let expression
                AstNode expr = expr();
                pn.setLength(getNodeEnd(expr) - pos);
                pn.setBody(expr);
                if (isStatement) {
                    // let expression in statement context
                    ExpressionStatement es = new ExpressionStatement(pn, !insideFunction());
                    es.setLineno(pn.getLineno());
                    return es;
                }
            }
        } finally {
            // Always pop the scope pushed above, even on parse errors.
            popScope();
        }
        return pn;
    }
}
public class AbstractScoreBasedFeatureSelector {
    /**
     * {@inheritDoc}
     */
    @Override
    protected void _transform(Dataframe newData) {
        // Features that survived selection during fit.
        Set<Object> selectedFeatures = knowledgeBase.getModelParameters().getFeatureScores().keySet();
        StorageEngine storageEngine = knowledgeBase.getStorageEngine();
        Set<TypeInference.DataType> supportedXDataTypes = getSupportedXDataTypes();
        // Temporary storage-backed map used as a set of column names to drop.
        Map<Object, Boolean> tmp_removedColumns = storageEngine.getBigMap("tmp_removedColumns", Object.class, Boolean.class, StorageEngine.MapType.HASHMAP, StorageEngine.StorageHint.IN_MEMORY, true, true);
        // keep only the columns which are compatible with the algorithm
        Stream<Object> compatibleColumns = newData.getXDataTypes().entrySet().stream()
                .filter(e -> supportedXDataTypes.contains(e.getValue()))
                .map(e -> e.getKey());
        // Any compatible column that is not among the selected features gets
        // marked for removal; possibly evaluated in parallel.
        streamExecutor.forEach(StreamMethods.stream(compatibleColumns, isParallelized()), column -> {
            if (!selectedFeatures.contains(column)) {
                tmp_removedColumns.put(column, true);
            }
        });
        logger.debug("Removing Columns");
        newData.dropXColumns(tmp_removedColumns.keySet());
        // Release the temporary big map once the drop is complete.
        storageEngine.dropBigMap("tmp_removedColumns", tmp_removedColumns);
    }
}
public class ssl_stats {
    /**
     * <pre>
     * converts nitro response into object and returns the object array in case of get request.
     * </pre>
     */
    protected base_resource[] get_nitro_response(nitro_service service, String response) throws Exception {
        ssl_stats[] resources = new ssl_stats[1];
        ssl_response result = (ssl_response) service.get_payload_formatter().string_to_resource(ssl_response.class, response);
        if (result.errorcode != 0) {
            if (result.errorcode == 444) {
                // Error code 444 means the session expired: drop the cached session.
                service.clear_session();
            }
            if (result.severity != null) {
                // Only severity "ERROR" is fatal; lower severities fall through
                // and the (possibly partial) payload is still returned.
                if (result.severity.equals("ERROR"))
                    throw new nitro_exception(result.message, result.errorcode);
            } else {
                // No severity reported: treat any non-zero error code as fatal.
                throw new nitro_exception(result.message, result.errorcode);
            }
        }
        resources[0] = result.ssl;
        return resources;
    }
}
public class InputElementStack {
    /**
     * Callback method called by the namespace default provider. At
     * this point we can trust it to only call this method with somewhat
     * valid arguments (no dups etc).
     *
     * @param prefix the namespace prefix; null or empty means the default namespace
     * @param uri the namespace URI; null or empty means an unbind (XML 1.1)
     */
    public void addNsBinding(String prefix, String uri) {
        // Unbind? (xml 1.1...) Normalize empty URI to null.
        if ((uri == null) || (uri.length() == 0)) {
            uri = null;
        }
        // Default ns declaration? Normalize empty prefix to null and record
        // the default namespace on the current element.
        if ((prefix == null) || (prefix.length() == 0)) {
            prefix = null;
            mCurrElement.mDefaultNsURI = uri;
        }
        mNamespaces.addStrings(prefix, uri);
    }
}
public class IdGenerator {
    /**
     * Generates a tiny id (various bit long, does not include node info).
     * Format: {@code <41-bit: timestamp><0 or 16bit: sequence number>}. Where {@code timestamp} is in
     * blocks of 10-second, minus the epoch.
     * Note:
     * <ul>
     * <li>Tiny IDs are not distributed! IDs generated from different nodes can be duplicated since
     * ID does not include node info.</li>
     * <li>If IDs are generated at low rate (~1 ID per 10 seconds), then ID is "tiny" (41-bit
     * long). Otherwise it is suffixed by a 16-bit sequence number.</li>
     * <li>Hence, generated IDs may NOT be in ascending order!</li>
     * </ul>
     *
     * @return the generated tiny ID
     */
    synchronized public long generateIdTiny() {
        final long blockSize = 10000L; // block 10000 ms
        long timestamp = System.currentTimeMillis() / blockSize;
        long sequence = 0;
        boolean done = false;
        while (!done) {
            done = true;
            // Guard against clock drift: never issue an ID for a block older
            // than the last one used.
            while (timestamp < lastTimestampMillisec.get() / blockSize) {
                timestamp = waitTillNextTick(timestamp, blockSize);
            }
            if (timestamp == lastTimestampMillisec.get() / blockSize) {
                // Same block as the previous ID: bump the sequence counter.
                sequence = sequenceTiny.incrementAndGet();
                if (sequence > MAX_SEQUENCE_TINY) {
                    // reset sequence, move to the next block and re-run the
                    // checks before issuing.
                    sequenceTiny.set(sequence = 0);
                    timestamp = waitTillNextTick(timestamp, blockSize);
                    done = false;
                }
            }
        }
        sequenceTiny.set(sequence);
        lastTimestampMillisec.set(timestamp * blockSize);
        timestamp -= TIMESTAMP_EPOCH / blockSize;
        // Low-rate case (sequence 0) yields the bare timestamp; otherwise the
        // timestamp is shifted left and the 16-bit sequence is appended.
        return sequence == 0 ? timestamp : (timestamp << SHIFT_TIMESTAMP_TINY) | (sequence & MASK_SEQUENCE_TINY);
    }
}
public class RedisClient { /** * Create a new client that connects to the supplied uri with shared { @ link ClientResources } . You need to shut down the
* { @ link ClientResources } upon shutting down your application . You can connect to different Redis servers but you must
* supply a { @ link RedisURI } on connecting .
* @ param clientResources the client resources , must not be { @ literal null }
* @ param uri the Redis URI , must not be { @ literal null }
* @ return a new instance of { @ link RedisClient } */
public static RedisClient create ( ClientResources clientResources , String uri ) { } } | assertNotNull ( clientResources ) ; LettuceAssert . notEmpty ( uri , "URI must not be empty" ) ; return create ( clientResources , RedisURI . create ( uri ) ) ; |
public class OpenPgpPubSubUtil { /** * Fetch the OpenPGP public key of a { @ code contact } , identified by its OpenPGP { @ code v4 _ fingerprint } .
* @ see < a href = " https : / / xmpp . org / extensions / xep - 0373 . html # discover - pubkey " > XEP - 0373 § 4.3 < / a >
* @ param connection XMPP connection
* @ param contact { @ link BareJid } of the contact we want to fetch a key from .
* @ param v4 _ fingerprint upper case , hex encoded v4 fingerprint of the contacts key .
* @ return { @ link PubkeyElement } containing the requested public key .
* @ throws InterruptedException if the thread gets interrupted . A
* @ throws XMPPException . XMPPErrorException in case of an XMPP protocol error .
* @ throws PubSubException . NotAPubSubNodeException in case the targeted entity is not a PubSub node .
* @ throws PubSubException . NotALeafNodeException in case the fetched node is not a { @ link LeafNode } .
* @ throws SmackException . NotConnectedException in case we are not connected .
* @ throws SmackException . NoResponseException if the server doesn ' t respond . */
public static PubkeyElement fetchPubkey ( XMPPConnection connection , BareJid contact , OpenPgpV4Fingerprint v4_fingerprint ) throws InterruptedException , XMPPException . XMPPErrorException , PubSubException . NotAPubSubNodeException , PubSubException . NotALeafNodeException , SmackException . NotConnectedException , SmackException . NoResponseException { } } | PubSubManager pm = PubSubManager . getInstanceFor ( connection , contact ) ; String nodeName = PEP_NODE_PUBLIC_KEY ( v4_fingerprint ) ; LeafNode node = getLeafNode ( pm , nodeName ) ; List < PayloadItem < PubkeyElement > > list = node . getItems ( 1 ) ; if ( list . isEmpty ( ) ) { return null ; } return list . get ( 0 ) . getPayload ( ) ; |
public class CleverTapAPI { /** * Validation */
private void pushValidationResult ( ValidationResult vr ) { } } | synchronized ( pendingValidationResultsLock ) { try { int len = pendingValidationResults . size ( ) ; if ( len > 50 ) { ArrayList < ValidationResult > trimmed = new ArrayList < > ( ) ; // Trim down the list to 40 , so that this loop needn ' t run for the next 10 events
// Hence , skip the first 10 elements
for ( int i = 10 ; i < len ; i ++ ) trimmed . add ( pendingValidationResults . get ( i ) ) ; trimmed . add ( vr ) ; pendingValidationResults = trimmed ; } else { pendingValidationResults . add ( vr ) ; } } catch ( Exception e ) { // no - op
} } |
public class BELDataConversionException { /** * { @ inheritDoc } */
@ Override public String getUserFacingMessage ( ) { } } | final StringBuilder bldr = new StringBuilder ( ) ; bldr . append ( "PARSE ERROR" ) ; final String name = getName ( ) ; if ( name != null ) { bldr . append ( " in " ) ; bldr . append ( name ) ; } bldr . append ( "\n\treason: " ) ; bldr . append ( String . format ( MSG_FMT , propertyValue , propertyName ) ) ; bldr . append ( "\n" ) ; return bldr . toString ( ) ; |
public class GenericDao { /** * 根据查询条件获取结果集列表
* @ param matches
* @ param order
* @ param offset
* @ param limit
* @ return */
public List < ENTITY > find ( List < Match > matches , List < Order > order , int offset , int limit ) { } } | // FIXME
Query operate = queryGenerator . getSelectQuery ( matches , ( order == null ) ? null : order . toArray ( new Order [ order . size ( ) ] ) ) ; String sql = operate . getSql ( ) ; List < Object > params = operate . getParams ( ) ; if ( offset != 0 || limit != 0 ) { sql = sql + " limit ?, ?" ; params . add ( offset ) ; params . add ( limit ) ; } // 执行操作
return findBySQL ( sql , operate . getParams ( ) ) ; |
public class CmsSearchIndex { /** * Extends the given path query with another term for the given search root element . < p >
* @ param terms the path filter to extend
* @ param searchRoot the search root to add to the path query */
protected void extendPathFilter ( List < Term > terms , String searchRoot ) { } } | if ( ! CmsResource . isFolder ( searchRoot ) ) { searchRoot += "/" ; } terms . add ( new Term ( CmsSearchField . FIELD_PARENT_FOLDERS , searchRoot ) ) ; |
public class FastaFormat {
    /**
     * Initializes {@code transformNucleotides} as the inverse of the
     * {@code nucleotides} map (value string mapped back to key string).
     */
    private static void initMapTransformNucleotides() {
        transformNucleotides = new HashMap<String, String>();
        // Invert the nucleotides map: each value becomes a key and vice versa.
        // NOTE(review): duplicate values in `nucleotides` would silently collapse
        // into a single inverted entry here — confirm the values are unique.
        for (Map.Entry e : nucleotides.entrySet()) {
            transformNucleotides.put(e.getValue().toString(), e.getKey().toString());
        }
    }
}
public class DefaultWhenVertx { /** * Undeploy a verticle
* @ param deploymentID The deployment ID
* @ return A promise for undeployment completion */
@ Override public Promise < Void > undeploy ( String deploymentID ) { } } | return adapter . toPromise ( handler -> vertx . undeploy ( deploymentID , handler ) ) ; |
public class ExplodedExporterDelegate {
    /**
     * {@inheritDoc}
     *
     * @see org.jboss.shrinkwrap.impl.base.exporter.AbstractExporterDelegate#processNode(ArchivePath, Node)
     */
    @Override
    protected void processNode(ArchivePath path, Node node) {
        // Get path to file
        final String assetFilePath = path.get();
        // Create a file for the asset
        final File assetFile = new File(outputDirectory, assetFilePath);
        // Get the assets parent parent directory and make sure it exists
        final File assetParent = assetFile.getParentFile();
        if (!assetParent.exists()) {
            if (!assetParent.mkdirs()) {
                throw new ArchiveExportException("Failed to write asset. Unable to create parent directory.");
            }
        }
        // Handle Archive assets separately
        // NOTE(review): node is null-checked here but dereferenced unconditionally
        // below via node.getAsset() — confirm callers never pass a null node.
        if (node != null && node.getAsset() instanceof ArchiveAsset) {
            ArchiveAsset nesteArchiveAsset = ArchiveAsset.class.cast(node.getAsset());
            processArchiveAsset(assetParent, nesteArchiveAsset);
            return;
        }
        // Handle directory assets separately
        try {
            // A node without an asset represents a directory entry.
            final boolean isDirectory = (node.getAsset() == null);
            if (isDirectory) {
                // If doesn't already exist
                if (!assetFile.exists()) {
                    // Attempt a create
                    if (!assetFile.mkdirs()) {
                        // Some error in writing
                        throw new ArchiveExportException("Failed to write directory: " + assetFile.getAbsolutePath());
                    }
                }
            }
            // Only handle non-directory assets, otherwise the path is handled above
            else {
                try {
                    if (log.isLoggable(Level.FINE)) {
                        log.fine("Writing asset " + path.get() + " to " + assetFile.getAbsolutePath());
                    }
                    // Get the asset streams
                    final InputStream assetInputStream = node.getAsset().openStream();
                    final FileOutputStream assetFileOutputStream = new FileOutputStream(assetFile);
                    final BufferedOutputStream assetBufferedOutputStream = new BufferedOutputStream(assetFileOutputStream, 8192);
                    // Write contents; copyWithClose closes both streams when done.
                    IOUtil.copyWithClose(assetInputStream, assetBufferedOutputStream);
                } catch (final Exception e) {
                    // Provide a more detailed exception than the outer block
                    throw new ArchiveExportException("Failed to write asset " + path + " to " + assetFile, e);
                }
            }
        } catch (final Exception e) {
            throw new ArchiveExportException("Unexpected error encountered in export of " + node, e);
        }
    }
}
public class BatchKernelImpl { /** * stop all jobs
* - for each TLJ : stop job */
private void registerExecutingJob ( long jobExecutionId , BatchJobWorkUnit workUnit ) { } } | if ( executingJobs . containsKey ( jobExecutionId ) ) { throw new IllegalStateException ( "Already have entry in executingJobs map for job exec id = " + jobExecutionId ) ; } executingJobs . put ( jobExecutionId , workUnit ) ; |
public class MembershipHandlerImpl { /** * Notifying listeners before membership creation .
* @ param membership
* the membership which is used in create operation
* @ param isNew
* true , if we have a deal with new membership , otherwise it is false
* which mean update operation is in progress
* @ throws Exception
* if any listener failed to handle the event */
private void preSave ( Membership membership , boolean isNew ) throws Exception { } } | for ( MembershipEventListener listener : listeners ) { listener . preSave ( membership , isNew ) ; } |
public class BitConverter { /** * Writes the given 16 - bit Short to the given ArrayView at the given offset .
* @ param target The ArrayView to write to .
* @ param offset The offset within the ArrayView to write at .
* @ param value The value to write .
* @ return The number of bytes written . */
public static int writeShort ( ArrayView target , int offset , short value ) { } } | return writeShort ( target . array ( ) , target . arrayOffset ( ) + offset , value ) ; |
public class TimeSeriesLookup {
    /**
     * Lookup time series associated with the given metric, tagk, tagv or tag
     * pairs. Either the meta table or the data table will be scanned. If no
     * metric is given, a full table scan must be performed and this call may take
     * a long time to complete.
     * When dumping to stdout, if an ID can't be looked up, it will be logged and
     * skipped.
     *
     * @return A list of TSUIDs matching the given lookup query.
     * @throws NoSuchUniqueName if any of the given names fail to resolve to a
     * UID.
     * @since 2.2
     */
    public Deferred<List<byte[]>> lookupAsync() {
        // Optional regex applied against the raw (stringified) TSUID bytes.
        final Pattern tagv_regex = tagv_filter != null ? Pattern.compile(tagv_filter) : null;
        // we don't really know what size the UIDs will resolve to so just grab
        // a decent amount.
        final StringBuffer buf = to_stdout ? new StringBuffer(2048) : null;
        final long start = System.currentTimeMillis();
        // Per-scanner row limit: when salting is in play the user limit is
        // spread across the salt-bucket scanners.
        final int limit;
        if (query.getLimit() > 0) {
            if (query.useMeta() || Const.SALT_WIDTH() < 1) {
                limit = query.getLimit();
            } else if (query.getLimit() < Const.SALT_BUCKETS()) {
                limit = 1;
            } else {
                limit = query.getLimit() / Const.SALT_BUCKETS();
            }
        } else {
            limit = 0;
        }

        /** Consumes scanner rows, accumulating matching TSUIDs (or dumping them). */
        class ScannerCB implements Callback<Deferred<List<byte[]>>, ArrayList<ArrayList<KeyValue>>> {
            private final Scanner scanner;
            // used to avoid dupes when scanning the data table
            private byte[] last_tsuid = null;
            private int rows_read;

            ScannerCB(final Scanner scanner) {
                this.scanner = scanner;
            }

            Deferred<List<byte[]>> scan() {
                return scanner.nextRows().addCallbackDeferring(this);
            }

            @Override
            public Deferred<List<byte[]>> call(final ArrayList<ArrayList<KeyValue>> rows) throws Exception {
                if (rows == null) {
                    // End of scan: close and hand back what we collected.
                    scanner.close();
                    if (query.useMeta() || Const.SALT_WIDTH() < 1) {
                        LOG.debug("Lookup query matched " + tsuids.size() + " time series in " + (System.currentTimeMillis() - start) + " ms");
                    }
                    return Deferred.fromResult(tsuids);
                }
                for (final ArrayList<KeyValue> row : rows) {
                    if (limit > 0 && rows_read >= limit) {
                        // little recursion to close the scanner and log above.
                        return call(null);
                    }
                    // Meta-table keys are TSUIDs directly; data-table keys need extraction.
                    final byte[] tsuid = query.useMeta() ? row.get(0).key() : UniqueId.getTSUIDFromKey(row.get(0).key(), TSDB.metrics_width(), Const.TIMESTAMP_BYTES);
                    // TODO - there MUST be a better way than creating a ton of temp
                    // string objects.
                    if (tagv_regex != null && !tagv_regex.matcher(new String(tsuid, CHARSET)).find()) {
                        continue;
                    }
                    if (to_stdout) {
                        // Data-table scans can yield the same TSUID for consecutive rows.
                        if (last_tsuid != null && Bytes.memcmp(last_tsuid, tsuid) == 0) {
                            continue;
                        }
                        last_tsuid = tsuid;
                        try {
                            buf.append(UniqueId.uidToString(tsuid)).append(" ");
                            buf.append(RowKey.metricNameAsync(tsdb, tsuid).joinUninterruptibly());
                            buf.append(" ");
                            final List<byte[]> tag_ids = UniqueId.getTagPairsFromTSUID(tsuid);
                            final Map<String, String> resolved_tags = Tags.resolveIdsAsync(tsdb, tag_ids).joinUninterruptibly();
                            for (final Map.Entry<String, String> tag_pair : resolved_tags.entrySet()) {
                                buf.append(tag_pair.getKey()).append("=").append(tag_pair.getValue()).append(" ");
                            }
                        } catch (NoSuchUniqueId nsui) {
                            // Per the contract above: log and skip unresolvable IDs.
                            LOG.error("Unable to resolve UID in TSUID (" + UniqueId.uidToString(tsuid) + ") " + nsui.getMessage());
                        }
                        // NOTE(review): the buffer is reset without being printed
                        // anywhere in this block — confirm the stdout emission
                        // happens elsewhere or was lost.
                        buf.setLength(0); // reset the buffer so we can re-use it
                    } else {
                        tsuids.add(tsuid);
                    }
                    ++rows_read;
                }
                return scan();
            }

            @Override
            public String toString() {
                return "Scanner callback";
            }
        }

        /** Joins the per-salt-bucket scans and returns the combined TSUID list. */
        class CompleteCB implements Callback<List<byte[]>, ArrayList<List<byte[]>>> {
            @Override
            public List<byte[]> call(final ArrayList<List<byte[]>> unused) throws Exception {
                LOG.debug("Lookup query matched " + tsuids.size() + " time series in " + (System.currentTimeMillis() - start) + " ms");
                return tsuids;
            }

            @Override
            public String toString() {
                return "Final async lookup callback";
            }
        }

        /** Kicks off the scans once the query's names have resolved to UIDs. */
        class UIDCB implements Callback<Deferred<List<byte[]>>, Object> {
            @Override
            public Deferred<List<byte[]>> call(Object arg0) throws Exception {
                if (!query.useMeta() && Const.SALT_WIDTH() > 0 && metric_uid != null) {
                    // Salted data table: one scanner per salt bucket, grouped.
                    final ArrayList<Deferred<List<byte[]>>> deferreds = new ArrayList<Deferred<List<byte[]>>>(Const.SALT_BUCKETS());
                    for (int i = 0; i < Const.SALT_BUCKETS(); i++) {
                        deferreds.add(new ScannerCB(getScanner(i)).scan());
                    }
                    return Deferred.group(deferreds).addCallback(new CompleteCB());
                } else {
                    return new ScannerCB(getScanner(0)).scan();
                }
            }

            @Override
            public String toString() {
                return "UID resolution callback";
            }
        }

        return resolveUIDs().addCallbackDeferring(new UIDCB());
    }
}
public class CmsSecurityManager {
    /**
     * Returns all resources of the given organizational unit.<p>
     *
     * @param context the current request context
     * @param orgUnit the organizational unit to get all resources for
     * @return all <code>{@link CmsResource}</code> objects in the organizational unit
     * @throws CmsException if operation was not successful
     * @see org.opencms.security.CmsOrgUnitManager#getResourcesForOrganizationalUnit(CmsObject, String)
     * @see org.opencms.security.CmsOrgUnitManager#getGroups(CmsObject, String, boolean)
     * @see org.opencms.security.CmsOrgUnitManager#getUsers(CmsObject, String, boolean)
     */
    public List<CmsResource> getResourcesForOrganizationalUnit(CmsRequestContext context, CmsOrganizationalUnit orgUnit) throws CmsException {
        List<CmsResource> result = null;
        CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
        try {
            result = m_driverManager.getResourcesForOrganizationalUnit(dbc, orgUnit);
        } catch (Exception e) {
            // Delegate error translation/logging to the db context.
            // NOTE(review): if report() does not rethrow, this method returns null.
            dbc.report(null, Messages.get().container(Messages.ERR_READ_ORGUNIT_RESOURCES_1, orgUnit.getName()), e);
        } finally {
            // Always release the db context.
            dbc.clear();
        }
        return result;
    }
}
public class TransformationUtils { /** * Scans a XML file for transformation descriptions and returns all successfully read transformation descriptions . */
public static List < TransformationDescription > getDescriptionsFromXMLFile ( File file ) { } } | List < TransformationDescription > desc = new ArrayList < TransformationDescription > ( ) ; try { return loadDescrtipionsFromXMLInputSource ( new InputSource ( file . getAbsolutePath ( ) ) , file . getName ( ) ) ; } catch ( Exception e ) { LOGGER . error ( "Unable to read the descriptions from file " + file . getAbsolutePath ( ) , e ) ; } return desc ; |
public class BigendianEncoding { /** * Returns the { @ code long } value whose base16 representation is stored in the first 16 chars of
* { @ code chars } starting from the { @ code offset } .
* @ param chars the base16 representation of the { @ code long } .
* @ param offset the starting offset in the { @ code CharSequence } . */
static long longFromBase16String ( CharSequence chars , int offset ) { } } | Utils . checkArgument ( chars . length ( ) >= offset + LONG_BASE16 , "chars too small" ) ; return ( decodeByte ( chars . charAt ( offset ) , chars . charAt ( offset + 1 ) ) & 0xFFL ) << 56 | ( decodeByte ( chars . charAt ( offset + 2 ) , chars . charAt ( offset + 3 ) ) & 0xFFL ) << 48 | ( decodeByte ( chars . charAt ( offset + 4 ) , chars . charAt ( offset + 5 ) ) & 0xFFL ) << 40 | ( decodeByte ( chars . charAt ( offset + 6 ) , chars . charAt ( offset + 7 ) ) & 0xFFL ) << 32 | ( decodeByte ( chars . charAt ( offset + 8 ) , chars . charAt ( offset + 9 ) ) & 0xFFL ) << 24 | ( decodeByte ( chars . charAt ( offset + 10 ) , chars . charAt ( offset + 11 ) ) & 0xFFL ) << 16 | ( decodeByte ( chars . charAt ( offset + 12 ) , chars . charAt ( offset + 13 ) ) & 0xFFL ) << 8 | ( decodeByte ( chars . charAt ( offset + 14 ) , chars . charAt ( offset + 15 ) ) & 0xFFL ) ; |
public class DefaultDummyFactory { /** * Returns a dummy entity for the requested type .
* All this method guarantees is that the returned entity is a valid entity with all non null foreign key relations filled in ,
* it does not guarantee useful or even semantically correct data .
* The dummy objects get taken either from the list of objects to insert from the redG object or are created on the fly . The results are cached and the
* same entity will be returned for consecutive calls for an entity of the same type .
* @ param redG The redG instance
* @ param dummyClass The class specifying the wanted entity type
* @ param < T > The wanted entity type
* @ return a dummy object of thew required type
* @ throws DummyCreationException When a new dummy has to be created but this creation fails */
@ Override public < T extends RedGEntity > T getDummy ( final AbstractRedG redG , final Class < T > dummyClass ) { } } | // check if a dummy for this type already exists in cache
if ( this . dummyCache . containsKey ( dummyClass ) ) { return dummyClass . cast ( this . dummyCache . get ( dummyClass ) ) ; } final T obj = createNewDummy ( redG , dummyClass ) ; // if no one is found , create new
this . dummyCache . put ( dummyClass , obj ) ; return obj ; |
public class Example3Main { /** * Example for a path query on Course . teacher . name = = myName .
* @ param name */
@ SuppressWarnings ( "unchecked" ) private void queryForCoursesByTeacher ( String name ) { } } | System . out . println ( ">> Query for courses by teacher " + name + " returned:" ) ; // using reference in query
Query q = pm . newQuery ( Course . class , "teacher.name == '" + name + "'" ) ; Collection < Course > courses = ( Collection < Course > ) q . execute ( ) ; for ( Course c : courses ) { System . out . println ( ">> - " + c . getName ( ) + " by " + c . getTeacher ( ) . getName ( ) ) ; } |
public class StatefulBeanReaper { /** * d112258 */
public boolean beanExistsAndTimedOut ( TimeoutElement elt , BeanId beanId ) { } } | // If the bean does not exist in the Reaper ' s list , return false . This
// may occur if remove ( ) has been called on the bean , while the reaper
// is attempting to time it out .
if ( elt == null ) { if ( ivSfFailoverCache != null ) // LIDB2018-1
{ return ivSfFailoverCache . beanExistsAndTimedOut ( beanId ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Session bean not in Reaper: Timeout = false" ) ; } return false ; } return elt . isTimedOut ( ) ; // F61004.5 |
public class TileGenerator {
    /**
     * Generate the tiles for the zoom level.
     *
     * <p>Iterates the requested tile grid, creating (or, on update, replacing) one tile row per
     * coordinate. If no tile produced a decodable image, all rows written for this zoom level
     * within the grid are deleted again; otherwise a tile matrix entry is created when needed.
     *
     * @param tileMatrixDao DAO used to look up / create the tile matrix entry
     * @param tileDao DAO used to create, delete and query tile rows
     * @param contents contents entry the tile matrix belongs to
     * @param zoomLevel zoom level being generated
     * @param tileGrid grid of tile coordinates to generate
     * @param localTileGrid optional grid used to translate coordinates into the local tile space
     * @param matrixWidth tile matrix width
     * @param matrixHeight tile matrix height
     * @param update true when updating an existing table (existing rows are deleted first)
     * @return tile count (0 when no tile could be decoded and everything was rolled back)
     * @throws java.sql.SQLException on database failure
     * @throws java.io.IOException on image read/write failure
     */
    private int generateTiles(TileMatrixDao tileMatrixDao, TileDao tileDao, Contents contents, int zoomLevel, TileGrid tileGrid, TileGrid localTileGrid, long matrixWidth, long matrixHeight, boolean update) throws SQLException, IOException {
        int count = 0;
        // Dimensions of the first successfully decoded tile; stay null if none decodes.
        Integer tileWidth = null;
        Integer tileHeight = null;
        // Download and create the tile and each coordinate
        for (long x = tileGrid.getMinX(); x <= tileGrid.getMaxX(); x++) {
            // Check if the progress has been cancelled
            if (progress != null && !progress.isActive()) {
                break;
            }
            for (long y = tileGrid.getMinY(); y <= tileGrid.getMaxY(); y++) {
                // Check if the progress has been cancelled
                if (progress != null && !progress.isActive()) {
                    break;
                }
                try {
                    // Create the tile
                    byte[] tileBytes = createTile(zoomLevel, x, y);
                    if (tileBytes != null) {
                        BufferedImage image = null;
                        // Compress the image
                        if (compressFormat != null) {
                            image = ImageUtils.getImage(tileBytes);
                            if (image != null) {
                                tileBytes = ImageUtils.writeImageToBytes(image, compressFormat, compressQuality);
                            }
                        }
                        // Create a new tile row
                        TileRow newRow = tileDao.newRow();
                        newRow.setZoomLevel(zoomLevel);
                        long tileColumn = x;
                        long tileRow = y;
                        // Update the column and row to the local tile grid
                        // location
                        if (localTileGrid != null) {
                            tileColumn = (x - tileGrid.getMinX()) + localTileGrid.getMinX();
                            tileRow = (y - tileGrid.getMinY()) + localTileGrid.getMinY();
                        }
                        // If an update, delete an existing row
                        if (update) {
                            tileDao.deleteTile(tileColumn, tileRow, zoomLevel);
                        }
                        newRow.setTileColumn(tileColumn);
                        newRow.setTileRow(tileRow);
                        newRow.setTileData(tileBytes);
                        tileDao.create(newRow);
                        count++;
                        // Determine the tile width and height (only once, from the
                        // first tile that yields a decodable image)
                        if (tileWidth == null) {
                            if (image == null) {
                                image = ImageUtils.getImage(tileBytes);
                            }
                            if (image != null) {
                                tileWidth = image.getWidth();
                                tileHeight = image.getHeight();
                            }
                        }
                    }
                } catch (Exception e) {
                    LOGGER.log(Level.WARNING, "Failed to create tile. Zoom: " + zoomLevel + ", x: " + x + ", y: " + y, e);
                    // Skip this tile, don't increase count
                }
                // Update the progress count, even on failures
                if (progress != null) {
                    progress.addZoomLevelProgress(zoomLevel, 1);
                    progress.addProgress(1);
                }
            }
        }
        // If none of the tiles were translated into a bitmap with dimensions,
        // delete them
        if (tileWidth == null || tileHeight == null) {
            count = 0;
            StringBuilder where = new StringBuilder();
            where.append(tileDao.buildWhere(TileTable.COLUMN_ZOOM_LEVEL, zoomLevel));
            where.append(" AND ");
            where.append(tileDao.buildWhere(TileTable.COLUMN_TILE_COLUMN, tileGrid.getMinX(), ">="));
            where.append(" AND ");
            where.append(tileDao.buildWhere(TileTable.COLUMN_TILE_COLUMN, tileGrid.getMaxX(), "<="));
            where.append(" AND ");
            where.append(tileDao.buildWhere(TileTable.COLUMN_TILE_ROW, tileGrid.getMinY(), ">="));
            where.append(" AND ");
            where.append(tileDao.buildWhere(TileTable.COLUMN_TILE_ROW, tileGrid.getMaxY(), "<="));
            String[] whereArgs = tileDao.buildWhereArgs(new Object[] { zoomLevel, tileGrid.getMinX(), tileGrid.getMaxX(), tileGrid.getMinY(), tileGrid.getMaxY() });
            tileDao.delete(where.toString(), whereArgs);
        } else {
            // Check if the tile matrix already exists
            boolean create = true;
            if (update) {
                create = !tileMatrixDao.idExists(new TileMatrixKey(tableName, zoomLevel));
            }
            // Create the tile matrix
            if (create) {
                // Calculate meters per pixel
                double pixelXSize = (tileGridBoundingBox.getMaxLongitude() - tileGridBoundingBox.getMinLongitude()) / matrixWidth / tileWidth;
                double pixelYSize = (tileGridBoundingBox.getMaxLatitude() - tileGridBoundingBox.getMinLatitude()) / matrixHeight / tileHeight;
                // Create the tile matrix for this zoom level
                TileMatrix tileMatrix = new TileMatrix();
                tileMatrix.setContents(contents);
                tileMatrix.setZoomLevel(zoomLevel);
                tileMatrix.setMatrixWidth(matrixWidth);
                tileMatrix.setMatrixHeight(matrixHeight);
                tileMatrix.setTileWidth(tileWidth);
                tileMatrix.setTileHeight(tileHeight);
                tileMatrix.setPixelXSize(pixelXSize);
                tileMatrix.setPixelYSize(pixelYSize);
                tileMatrixDao.create(tileMatrix);
            }
        }
        return count;
    }
}
public class ns_vm_template { /** * < pre >
* Use this operation to get NS VM Template .
* < / pre > */
public static ns_vm_template [ ] get ( nitro_service client ) throws Exception { } } | ns_vm_template resource = new ns_vm_template ( ) ; resource . validate ( "get" ) ; return ( ns_vm_template [ ] ) resource . get_resources ( client ) ; |
public class ComMethod {
    /**
     * Generates the default values for this ComMethod using the given {@link UseDefaultValues} annotation.
     *
     * <p>For each optional parameter, the string literal from the annotation is parsed into a Java
     * object matching the declared {@link NativeType}; {@code VARIANT} parameters are additionally
     * parsed according to the declared {@link Variant.Type}.
     *
     * @param defValues the annotation containing the information to generate the default values.
     * @throws UnsupportedOperationException if a native type or variant type has no known
     *         literal-parsing rule.
     */
    protected void generateDefaultParameters(UseDefaultValues defValues) {
        int count = defValues.optParamIndex().length;
        NativeType[] nt = defValues.nativeType();
        Variant.Type[] vt = defValues.variantType();
        String[] literal = defValues.literal();
        for (int i = 0; i < count; i++) {
            switch (nt[i]) {
            case Bool:
            case VariantBool:
            case VariantBool_ByRef:
                defaultParameters[i] = Boolean.parseBoolean(literal[i]);
                break;
            case BSTR:
            case BSTR_ByRef:
            case CSTR:
            case Unicode:
                // String-like native types take the literal verbatim.
                defaultParameters[i] = literal[i];
                break;
            case Double:
            case Double_ByRef:
                defaultParameters[i] = Double.parseDouble(literal[i]);
                break;
            case Float:
            case Float_ByRef:
                defaultParameters[i] = Float.parseFloat(literal[i]);
                break;
            case Int8:
            case Int8_ByRef:
                defaultParameters[i] = Byte.parseByte(literal[i]);
                break;
            case Int16:
            case Int16_ByRef:
                defaultParameters[i] = Short.parseShort(literal[i]);
                break;
            case Int32:
            case Int32_ByRef:
                defaultParameters[i] = Integer.parseInt(literal[i]);
                break;
            case Int64:
            case Int64_ByRef:
                defaultParameters[i] = Long.parseLong(literal[i]);
                break;
            case GUID:
                defaultParameters[i] = new GUID(literal[i]);
                break;
            case Currency:
            case Currency_ByRef:
                defaultParameters[i] = new BigDecimal(literal[i]);
                break;
            case VARIANT:
                // VARIANT parameters carry their concrete type in vt[i];
                // build the Variant and then stamp it with that type below.
                Variant v = new Variant();
                switch (vt[i]) {
                case VT_I1:
                case VT_UI1:
                    v.set(Byte.parseByte(literal[i]));
                    break;
                case VT_I2:
                case VT_UI2:
                    v.set(Short.parseShort(literal[i]));
                    break;
                case VT_I4:
                case VT_UI4:
                case VT_INT:
                case VT_UINT:
                    v.set(Integer.parseInt(literal[i]));
                    break;
                case VT_I8:
                    v.set(Long.parseLong(literal[i]));
                    break;
                case VT_R4:
                    v.set(Float.parseFloat(literal[i]));
                    break;
                case VT_R8:
                    v.set(Double.parseDouble(literal[i]));
                    break;
                case VT_BOOL:
                    v.set(Boolean.parseBoolean(literal[i]));
                    break;
                case VT_BSTR:
                    v.set(literal[i]);
                    break;
                case VT_EMPTY:
                    v = new Variant();
                    break;
                case VT_ERROR:
                    // Error codes are given as hexadecimal literals.
                    v.makeError((int) Long.parseLong(literal[i], 16));
                    break;
                // case VT_CY: ...
                default:
                    throw new UnsupportedOperationException("Don't know how to parse literal " + literal[i] + " to an Java Object corresponding to Variant.Type." + vt[i].name());
                }
                v.setType(vt[i]);
                defaultParameters[i] = v;
                break;
            default:
                throw new UnsupportedOperationException("Don't know how to parse literal " + literal[i] + " to an Java Object corresponding to NativeType." + nt[i].name());
            }
        }
    }
}
public class SVG { /** * Return the " preserveAspectRatio " attribute of the root { @ code < svg > }
* element in the form of an { @ link PreserveAspectRatio } object .
* @ return the preserveAspectRatio setting of the document ' s root { @ code < svg > } element .
* @ throws IllegalArgumentException if there is no current SVG document loaded . */
@ SuppressWarnings ( { } } | "WeakerAccess" , "unused" } ) public PreserveAspectRatio getDocumentPreserveAspectRatio ( ) { if ( this . rootElement == null ) throw new IllegalArgumentException ( "SVG document is empty" ) ; if ( this . rootElement . preserveAspectRatio == null ) return null ; return this . rootElement . preserveAspectRatio ; |
public class GradientTreeBoost {
    /**
     * Train L2 tree boost.
     *
     * <p>Binary-classification gradient boosting: labels are mapped to {-1, +1},
     * the model is initialized with the prior log-odds, and each iteration fits a
     * regression tree to the gradient of the logistic loss on a stratified subsample.
     */
    private void train2(Attribute[] attributes, double[][] x, int[] y) {
        int n = x.length;
        // Per-class counts, used for stratified subsampling below.
        int[] nc = new int[k];
        for (int i = 0; i < n; i++) {
            nc[y[i]]++;
        }
        // Map class labels {0,1} to {-1,+1}.
        int[] y2 = new int[n];
        for (int i = 0; i < n; i++) {
            if (y[i] == 1) {
                y2[i] = 1;
            } else {
                y2[i] = -1;
            }
        }
        double[] h = new double[n]; // current F(x_i)
        double[] response = new double[n]; // response variable for regression tree.
        // Initialize F with the prior log-odds of the positive class.
        double mu = Math.mean(y2);
        b = 0.5 * Math.log((1 + mu) / (1 - mu));
        for (int i = 0; i < n; i++) {
            h[i] = b;
        }
        int[][] order = SmileUtils.sort(attributes, x);
        RegressionTree.NodeOutput output = new L2NodeOutput(response);
        trees = new RegressionTree[ntrees];
        int[] perm = new int[n];
        int[] samples = new int[n];
        for (int i = 0; i < n; i++) {
            perm[i] = i;
        }
        for (int m = 0; m < ntrees; m++) {
            // Draw a stratified subsample: roughly nc[l] * subsample instances per class.
            Arrays.fill(samples, 0);
            Math.permutate(perm);
            for (int l = 0; l < k; l++) {
                int subj = (int) Math.round(nc[l] * subsample);
                int count = 0;
                for (int i = 0; i < n && count < subj; i++) {
                    int xi = perm[i];
                    if (y[xi] == l) {
                        samples[xi] = 1;
                        count++;
                    }
                }
            }
            // Pseudo-response: gradient of the logistic loss at the current model.
            for (int i = 0; i < n; i++) {
                response[i] = 2.0 * y2[i] / (1 + Math.exp(2 * y2[i] * h[i]));
            }
            trees[m] = new RegressionTree(attributes, x, response, maxNodes, 5, x[0].length, order, samples, output);
            // Add the shrunken tree prediction to the current model.
            for (int i = 0; i < n; i++) {
                h[i] += shrinkage * trees[m].predict(x[i]);
            }
        }
    }
}
public class BusNetwork { /** * Replies the nearest bus hub to the given point .
* @ param point the point
* @ return the nearest bus hub or < code > null < / code > if none was found . */
@ Pure public final BusHub getNearestBusHub ( GeoLocationPoint point ) { } } | return getNearestBusHub ( point . getX ( ) , point . getY ( ) ) ; |
public class AmazonWorkLinkClient { /** * Creates a fleet . A fleet consists of resources and the configuration that delivers associated websites to
* authorized users who download and set up the Amazon WorkLink app .
* @ param createFleetRequest
* @ return Result of the CreateFleet operation returned by the service .
* @ throws UnauthorizedException
* You are not authorized to perform this action .
* @ throws InternalServerErrorException
* The service is temporarily unavailable .
* @ throws InvalidRequestException
* The request is not valid .
* @ throws ResourceNotFoundException
* The requested resource was not found .
* @ throws ResourceAlreadyExistsException
* The resource already exists .
* @ throws TooManyRequestsException
* The number of requests exceeds the limit .
* @ sample AmazonWorkLink . CreateFleet
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / worklink - 2018-09-25 / CreateFleet " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateFleetResult createFleet ( CreateFleetRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateFleet ( request ) ; |
public class MapComposedElement { /** * Replies the specified point at the given index .
* < p > If the < var > index < / var > is negative , it will corresponds
* to an index starting from the end of the list .
* @ param index is the index of the desired point
* @ return the point at the given index
* @ throws IndexOutOfBoundsException in case of error . */
@ Pure public Point2d getPointAt ( int index ) { } } | final int count = getPointCount ( ) ; int idx = index ; if ( idx < 0 ) { idx = count + idx ; } if ( idx < 0 ) { throw new IndexOutOfBoundsException ( idx + "<0" ) ; // $ NON - NLS - 1 $
} if ( idx >= count ) { throw new IndexOutOfBoundsException ( idx + ">=" + count ) ; // $ NON - NLS - 1 $
} return new Point2d ( this . pointCoordinates [ idx * 2 ] , this . pointCoordinates [ idx * 2 + 1 ] ) ; |
public class Blog { /** * Get likes posts for this blog
* @ param options a map of options ( or null )
* @ return A List of posts */
public List < Post > likedPosts ( Map < String , ? > options ) { } } | return client . blogLikes ( this . name , options ) ; |
public class ObjectFactory { /** * Create an instance of { @ link Project . Layouts . Layout . Bars . BarGroup . Bar } */
public Project . Layouts . Layout . Bars . BarGroup . Bar createProjectLayoutsLayoutBarsBarGroupBar ( ) { } } | return new Project . Layouts . Layout . Bars . BarGroup . Bar ( ) ; |
public class FutureMono { /** * Convert a supplied { @ link Future } for each subscriber into { @ link Mono } .
* { @ link Mono # subscribe ( Subscriber ) }
* will bridge to { @ link Future # addListener ( GenericFutureListener ) } .
* In addition , current method allows interaction with downstream context , so it
* may be transferred to implicitly connected upstream
* Example :
* < pre > < code >
* Flux & lt ; String & gt ; dataStream = Flux . just ( " a " , " b " , " c " ) ;
* FutureMono . deferFutureWithContext ( ( subscriberContext ) - >
* context ( ) . channel ( )
* . writeAndFlush ( PublisherContext . withContext ( dataStream , subscriberContext ) ) ) ;
* < / code > < / pre >
* @ param deferredFuture the future to evaluate and convert from
* @ param < F > the future type
* @ return A { @ link Mono } forwarding { @ link Future } success or failure */
public static < F extends Future < Void > > Mono < Void > deferFutureWithContext ( Function < Context , F > deferredFuture ) { } } | return new DeferredContextFutureMono < > ( deferredFuture ) ; |
public class EvaluatorImpl { /** * Implement Number compare */
public static int compare ( Number a , Number b ) { } } | int aType = getType ( a ) ; int bType = getType ( b ) ; int compType = ( aType >= bType ) ? aType : bType ; switch ( compType ) { case INT : int li = a . intValue ( ) ; int ri = b . intValue ( ) ; return ( li < ri ) ? - 1 : ( li == ri ) ? 0 : 1 ; case LONG : long ll = a . longValue ( ) ; long rl = b . longValue ( ) ; return ( ll < rl ) ? - 1 : ( ll == rl ) ? 0 : 1 ; case FLOAT : float lf = a . floatValue ( ) ; float rf = b . floatValue ( ) ; return ( lf < rf ) ? - 1 : ( lf == rf ) ? 0 : 1 ; case DOUBLE : double ld = a . doubleValue ( ) ; double rd = b . doubleValue ( ) ; return ( ld < rd ) ? - 1 : ( ld == rd ) ? 0 : 1 ; default : throw new IllegalStateException ( ) ; } |
public class AbstractMessageHandler { /** * Safe write error response .
* @ param channel the channel
* @ param header the request header
* @ param error the exception */
protected static void safeWriteErrorResponse ( final Channel channel , final ManagementProtocolHeader header , final Throwable error ) { } } | if ( header . getType ( ) == ManagementProtocol . TYPE_REQUEST ) { try { writeErrorResponse ( channel , ( ManagementRequestHeader ) header , error ) ; } catch ( IOException ioe ) { ProtocolLogger . ROOT_LOGGER . tracef ( ioe , "failed to write error response for %s on channel: %s" , header , channel ) ; } } |
public class AVObject { /** * create a new instance with particular class and objectId .
* @ param clazz class info
* @ param objectId object id
* @ param < T >
* @ return
* @ throws AVException */
public static < T extends AVObject > T createWithoutData ( Class < T > clazz , String objectId ) throws AVException { } } | try { T obj = clazz . newInstance ( ) ; obj . setClassName ( Transformer . getSubClassName ( clazz ) ) ; obj . setObjectId ( objectId ) ; return obj ; } catch ( Exception ex ) { throw new AVException ( ex ) ; } |
public class Branch { /** * < p > Singleton method to return the pre - initialised , or newly initialise and return , a singleton
* object of the type { @ link Branch } . < / p >
* < p > Use this whenever you need to call a method directly on the { @ link Branch } object . < / p >
* @ param context A { @ link Context } from which this call was made .
* @ return An initialised { @ link Branch } object , either fetched from a pre - initialised
* instance within the singleton class , or a newly instantiated object where
* one was not already requested during the current app lifecycle . */
@ TargetApi ( Build . VERSION_CODES . ICE_CREAM_SANDWICH ) public static Branch getAutoInstance ( @ NonNull Context context ) { } } | isAutoSessionMode_ = true ; customReferrableSettings_ = CUSTOM_REFERRABLE_SETTINGS . USE_DEFAULT ; boolean isTest = BranchUtil . checkTestMode ( context ) ; getBranchInstance ( context , ! isTest , null ) ; return branchReferral_ ; |
public class U { /** * Documented , # reject */
public static < E > List < E > reject ( final List < E > list , final Predicate < E > pred ) { } } | return filter ( list , new Predicate < E > ( ) { @ Override public boolean test ( E input ) { return ! pred . test ( input ) ; } } ) ; |
public class SDVariable { /** * Norm2 ( L2 norm ) reduction operation : The output contains the L2 norm for each tensor / subset along the specified dimensions : < br >
* { @ code out = sqrt ( sum _ i x [ i ] ^ 2 ) } < br >
* Note that if keepDims = true , the output variable has the same rank as the input variable ,
* with the reduced dimensions having size 1 . This can be useful for later broadcast operations ( such as subtracting
* the mean along a dimension ) . < br >
* Example : if input has shape [ a , b , c ] and dimensions = [ 1 ] then output has shape :
* keepDims = true : [ a , 1 , c ] < br >
* keepDims = false : [ a , c ]
* @ param name Output variable name
* @ param keepDims If true : keep the dimensions that are reduced on ( as size 1 ) . False : remove the reduction dimensions
* @ param dimensions dimensions to reduce over
* @ return Output variable */
public SDVariable norm2 ( String name , boolean keepDims , int ... dimensions ) { } } | return sameDiff . norm2 ( name , this , keepDims , dimensions ) ; |
public class PrintStream {
    /**
     * Writes the specified byte to this stream. If the byte is a newline and
     * automatic flushing is enabled then the <code>flush</code> method will be
     * invoked.
     *
     * <p>Note that the byte is written as given; to write a character that
     * will be translated according to the platform's default character
     * encoding, use the <code>print(char)</code> or <code>println(char)</code>
     * methods.
     *
     * @param b The byte to be written
     * @see #print(char)
     * @see #println(char)
     */
    public void write(int b) {
        try {
            synchronized (this) {
                ensureOpen();
                out.write(b);
                // Auto-flush only on newline bytes, per the class contract.
                if ((b == '\n') && autoFlush)
                    out.flush();
            }
        } catch (InterruptedIOException x) {
            // Preserve the interrupt status instead of reporting an error.
            Thread.currentThread().interrupt();
        } catch (IOException x) {
            // PrintStream never throws IOException; record it in the trouble flag
            // (queried via checkError()) instead.
            trouble = true;
        }
    }
}
public class UriEscapeUtil {
    /**
     * Perform an escape operation, based on String, according to the specified type.
     *
     * <p>Characters that are alphabetic or allowed by {@code escapeType} are passed
     * through unchanged; every other codepoint is encoded with {@code encoding} and
     * emitted as one {@code %XX} sequence per byte. Returns the original String
     * instance when nothing needed escaping.
     *
     * @param text the text to escape; may be {@code null}, in which case {@code null} is returned
     * @param escapeType the escape type deciding which codepoints are allowed unescaped
     * @param encoding the character encoding used to turn codepoints into bytes
     * @return the escaped text (possibly the same instance when no escape was needed)
     * @throws IllegalArgumentException if {@code encoding} is not supported
     */
    static String escape(final String text, final UriEscapeType escapeType, final String encoding) {
        if (text == null) {
            return null;
        }
        // Lazily created: stays null while no character has required escaping.
        StringBuilder strBuilder = null;
        final int offset = 0;
        final int max = text.length();
        // Start of the pending not-yet-copied run of unescaped characters.
        int readOffset = offset;
        for (int i = offset; i < max; i++) {
            final int codepoint = Character.codePointAt(text, i);
            /*
             * Shortcut: most characters will be alphabetic, and we won't need to do anything at
             * all for them. No need to use the complete UriEscapeType check system at all.
             */
            if (UriEscapeType.isAlpha(codepoint)) {
                continue;
            }
            /*
             * Check whether the character is allowed or not
             */
            if (escapeType.isAllowed(codepoint)) {
                continue;
            }
            /*
             * At this point we know for sure we will need some kind of escape, so we
             * can increase the offset and initialize the string builder if needed, along with
             * copying to it all the contents pending up to this point.
             */
            if (strBuilder == null) {
                strBuilder = new StringBuilder(max + 20);
            }
            if (i - readOffset > 0) {
                strBuilder.append(text, readOffset, i);
            }
            if (Character.charCount(codepoint) > 1) {
                // This is to compensate that we are actually reading two char[] positions with a single codepoint.
                i++;
            }
            readOffset = i + 1;
            /*
             * Perform the real escape
             */
            final byte[] charAsBytes;
            try {
                charAsBytes = new String(Character.toChars(codepoint)).getBytes(encoding);
            } catch (final UnsupportedEncodingException e) {
                throw new IllegalArgumentException("Exception while escaping URI: Bad encoding '" + encoding + "'", e);
            }
            // One %XX sequence per encoded byte.
            for (final byte b : charAsBytes) {
                strBuilder.append('%');
                strBuilder.append(printHexa(b));
            }
        }
        /*
         * Final cleaning: return the original String object if no escape was actually needed. Otherwise
         * append the remaining unescaped text to the string builder and return.
         */
        if (strBuilder == null) {
            return text;
        }
        if (max - readOffset > 0) {
            strBuilder.append(text, readOffset, max);
        }
        return strBuilder.toString();
    }
}
public class DaoService { /** * { @ inheritDoc }
* @ see org . esupportail . smsuapi . dao . DaoService # getSms ( org . esupportail . smsuapi . dao . beans . Application , int , java . lang . String ) */
@ SuppressWarnings ( "unchecked" ) public List < Sms > getSms ( final Application app , final int id , final String phoneNumber ) { } } | DetachedCriteria criteria = DetachedCriteria . forClass ( Sms . class ) ; criteria . add ( Restrictions . eq ( Sms . PROP_INITIAL_ID , id ) ) ; criteria . add ( Restrictions . eq ( Sms . PROP_APP , app ) ) ; criteria . add ( Restrictions . eq ( Sms . PROP_PHONE , phoneNumber ) ) ; return getHibernateTemplate ( ) . findByCriteria ( criteria ) ; |
public class TransformerHandlerImpl { /** * Report an element type declaration .
* < p > The content model will consist of the string " EMPTY " , the
* string " ANY " , or a parenthesised group , optionally followed
* by an occurrence indicator . The model will be normalized so
* that all whitespace is removed , and will include the enclosing
* parentheses . < / p >
* @ param name The element type name .
* @ param model The content model as a normalized string .
* @ throws SAXException The application may raise an exception . */
public void elementDecl ( String name , String model ) throws SAXException { } } | if ( DEBUG ) System . out . println ( "TransformerHandlerImpl#elementDecl: " + name + ", " + model ) ; if ( null != m_declHandler ) { m_declHandler . elementDecl ( name , model ) ; } |
public class GConvolveImageOps {
    /**
     * Performs a 2D convolution across the image while re-normalizing the kernel depending on
     * its overlap with the image.
     *
     * <p>Dispatches on the runtime image family and pixel type to the matching typed
     * {@code ConvolveImageNormalized.convolve} overload; planar images are processed band
     * by band via recursion.
     *
     * @param input The original image. Not modified.
     * @param output Where the resulting image is written to. Modified.
     * @param kernel The kernel that is being convolved. Not modified.
     * @throws IllegalArgumentException if the image type or family is not supported
     */
    public static <T extends ImageBase<T>, K extends Kernel2D> void convolveNormalized(K kernel, T input, T output) {
        switch (input.getImageType().getFamily()) {
            case GRAY: {
                // Single-band images: dispatch by concrete pixel type.
                if (input instanceof GrayF32) {
                    ConvolveImageNormalized.convolve((Kernel2D_F32) kernel, (GrayF32) input, (GrayF32) output);
                } else if (input instanceof GrayF64) {
                    ConvolveImageNormalized.convolve((Kernel2D_F64) kernel, (GrayF64) input, (GrayF64) output);
                } else if (input instanceof GrayU8) {
                    ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (GrayU8) input, (GrayI8) output);
                } else if (input instanceof GrayS16) {
                    ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (GrayS16) input, (GrayI16) output);
                } else {
                    throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName());
                }
            }
            break;
            case INTERLEAVED: {
                // Interleaved multi-band images: same per-type dispatch.
                if (input instanceof InterleavedF32) {
                    ConvolveImageNormalized.convolve((Kernel2D_F32) kernel, (InterleavedF32) input, (InterleavedF32) output);
                } else if (input instanceof InterleavedF64) {
                    ConvolveImageNormalized.convolve((Kernel2D_F64) kernel, (InterleavedF64) input, (InterleavedF64) output);
                } else if (input instanceof InterleavedU8) {
                    ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (InterleavedU8) input, (InterleavedI8) output);
                } else if (input instanceof InterleavedS16) {
                    ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (InterleavedS16) input, (InterleavedI16) output);
                } else {
                    throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName());
                }
            }
            break;
            case PLANAR: {
                // Planar images: recurse once per band.
                Planar inp = (Planar) input;
                Planar outp = (Planar) output;
                for (int i = 0; i < inp.getNumBands(); i++) {
                    convolveNormalized(kernel, inp.getBand(i), outp.getBand(i));
                }
            }
            break;
            default:
                throw new IllegalArgumentException("Unknown image family");
        }
    }
}
public class Table { /** * Add XML to content . xml
* @ param util an util
* @ param appendable the output
* @ throws IOException if the XML could not be written */
public void appendXMLToContentEntry ( final XMLUtil util , final Appendable appendable ) throws IOException { } } | this . appender . appendXMLToContentEntry ( util , appendable ) ; |
public class MediaFormatBuilder { /** * Custom properties that my be used by application - specific markup builders or processors .
* @ param map Property map . Is merged with properties already set in builder .
* @ return this */
public @ NotNull MediaFormatBuilder properties ( Map < String , Object > map ) { } } | if ( map == null ) { throw new IllegalArgumentException ( "Map argument must not be null." ) ; } this . properties . putAll ( map ) ; return this ; |
public class BlueAnnotationViewGenerator { /** * Automatically generates a style map file for the given analysis engine
* metadata . The style map will be written to the file
* < code > aStyleMapFile < / code > .
* @ param aMetaData
* Metadata of the Analysis Engine whose outputs will be viewed
* using the generated style map .
* @ param aStyleMapFile
* file to which autogenerated style map will be written */
public void autoGenerateStyleMapFile ( AnalysisEngineMetaData aMetaData , File aStyleMapFile ) throws IOException { } } | String xmlStr = autoGenerateStyleMap ( aMetaData ) ; FileWriter out = null ; try { out = new FileWriter ( aStyleMapFile ) ; out . write ( xmlStr ) ; } finally { if ( out != null ) out . close ( ) ; } |
public class DataSet { /** * Runs a { @ link CustomUnaryOperation } on the data set . Custom operations are typically complex
* operators that are composed of multiple steps .
* @ param operation The operation to run .
* @ return The data set produced by the operation . */
public < X > DataSet < X > runOperation ( CustomUnaryOperation < T , X > operation ) { } } | Validate . notNull ( operation , "The custom operator must not be null." ) ; operation . setInput ( this ) ; return operation . createResult ( ) ; |
public class StaticFilesConfiguration { /** * Configures location for static resources
* @ param folder the location */
public synchronized void configure ( String folder ) { } } | Assert . notNull ( folder , "'folder' must not be null" ) ; if ( ! staticResourcesSet ) { if ( staticResourceHandlers == null ) { staticResourceHandlers = new ArrayList < > ( ) ; } staticResourceHandlers . add ( new ClassPathResourceHandler ( folder , "index.html" ) ) ; LOG . info ( "StaticResourceHandler configured with folder = " + folder ) ; staticResourcesSet = true ; } |
public class JsResources { /** * check if script is already in the header .
* @ return true if it exists */
public static boolean isInHeader ( final String scriptname ) { } } | final NodeList scriptList = Browser . getDocument ( ) . getHead ( ) . getElementsByTagName ( JsResources . TAG_TYPE ) ; for ( int i = 0 ; i < scriptList . getLength ( ) ; i ++ ) { final ScriptElement scriptTag = ( ScriptElement ) scriptList . item ( i ) ; if ( StringUtils . contains ( scriptTag . getSrc ( ) , scriptname ) ) { return true ; } } return false ; |
public class DOM3TreeWalker { /** * Taken from org . apache . xerces . dom . CoreDocumentImpl
* Check the string against XML ' s definition of acceptable names for
* elements and attributes and so on using the XMLCharacterProperties
* utility class */
protected boolean isXMLName ( String s , boolean xml11Version ) { } } | if ( s == null ) { return false ; } if ( ! xml11Version ) return XMLChar . isValidName ( s ) ; else return XML11Char . isXML11ValidName ( s ) ; |
public class EvaluationResult { /** * The individual results of the simulation of the API operation specified in EvalActionName on each resource .
* @ param resourceSpecificResults
* The individual results of the simulation of the API operation specified in EvalActionName on each
* resource . */
public void setResourceSpecificResults ( java . util . Collection < ResourceSpecificResult > resourceSpecificResults ) { } } | if ( resourceSpecificResults == null ) { this . resourceSpecificResults = null ; return ; } this . resourceSpecificResults = new com . amazonaws . internal . SdkInternalList < ResourceSpecificResult > ( resourceSpecificResults ) ; |
public class State { /** * Set a property .
* Both key and value are stored as strings .
* @ param key property key
* @ param value property value */
public void setProp ( String key , Object value ) { } } | this . specProperties . put ( key , value . toString ( ) ) ; |
public class LeftJoinNodeImpl {
    /**
     * When at least one value does not depend on a right-specific variable
     * (i.e. is a ground term or only depends on left variables).
     *
     * <p>In that case a provenance marker is needed on the right side: preferably an
     * existing non-nullable right variable, otherwise a fresh variable introduced via a
     * new construction node. Returns empty when no substitution value requires it.
     */
    private Optional<RightProvenance> createProvenanceElements(IQTree rightTree, ImmutableSubstitution<? extends ImmutableTerm> selectedSubstitution, ImmutableSet<Variable> leftVariables, VariableGenerator variableGenerator) {
        // Does any right-side substitution value depend only on left variables, or is ground?
        if (selectedSubstitution.getImmutableMap().entrySet().stream()
                .filter(e -> !leftVariables.contains(e.getKey()))
                .map(Map.Entry::getValue)
                .anyMatch(value -> value.getVariableStream().allMatch(leftVariables::contains) || value.isGround())) {
            VariableNullability rightVariableNullability = rightTree.getVariableNullability();
            // Prefer reusing an existing right-only variable that can never be null.
            Optional<Variable> nonNullableRightVariable = rightTree.getVariables().stream()
                    .filter(v -> !leftVariables.contains(v))
                    .filter(v -> !rightVariableNullability.isPossiblyNullable(v))
                    .findFirst();
            if (nonNullableRightVariable.isPresent()) {
                return Optional.of(new RightProvenance(nonNullableRightVariable.get()));
            }
            /*
             * Otherwise, creates a fresh variable and its construction node
             */
            else {
                Variable provenanceVariable = variableGenerator.generateNewVariable();
                // Project the fresh provenance variable alongside the existing right variables.
                ImmutableSet<Variable> newRightProjectedVariables =
                        Stream.concat(
                                Stream.of(provenanceVariable),
                                rightTree.getVariables().stream())
                                .collect(ImmutableCollectors.toSet());
                ConstructionNode newRightConstructionNode = iqFactory.createConstructionNode(
                        newRightProjectedVariables,
                        substitutionFactory.getSubstitution(provenanceVariable, termFactory.getProvenanceSpecialConstant()));
                return Optional.of(new RightProvenance(provenanceVariable, newRightConstructionNode));
            }
        } else {
            return Optional.empty();
        }
    }
}
public class Type { /** * Returns a new type from this one that represents a primitive type .
* If this type cannot be represented by a primitive , then this is
* returned . */
public Type toPrimitive ( ) { } } | if ( mPrimitive ) { return this ; } else { Class < ? > primitive = convertToPrimitive ( mObjectClass ) ; if ( primitive . isPrimitive ( ) ) { return new Type ( primitive ) ; } else { return new Type ( mGenericType , primitive ) ; } } |
public class Tile { /** * Removes the given Section from the list of sections .
* Sections in the Medusa library
* usually are less eye - catching than Areas .
* @ param SECTION */
public void removeSection ( final Section SECTION ) { } } | if ( null == SECTION ) return ; getSections ( ) . remove ( SECTION ) ; getSections ( ) . sort ( new SectionComparator ( ) ) ; fireTileEvent ( SECTION_EVENT ) ; |
public class ApikeyManager { /** * Serialize now . */
public String serializeNowFromMap ( @ Nullable String user , long duration , @ Nullable Arr roles , Map < String , Object > nameAndValMap ) { } } | return createNowFromMap ( user , duration , roles , nameAndValMap ) . serialize ( ) ; |
public class ComplexNumber { /** * Adds two complex numbers .
* @ param z1 Complex Number .
* @ param z2 Complex Number .
* @ return Returns new ComplexNumber instance containing the sum of specified complex numbers . */
public static ComplexNumber Add ( ComplexNumber z1 , ComplexNumber z2 ) { } } | return new ComplexNumber ( z1 . real + z2 . real , z1 . imaginary + z2 . imaginary ) ; |
public class URL { /** * Builds a String representation of this URL . This method works around
* double - escaping when passing the result of
* { @ link QueryString # toQueryString ( ) } to the { @ link URI } constructor .
* @ return A string representation of this URL .
* @ throws URISyntaxException If an error occurs in parsing the URL . */
private String build ( ) throws URISyntaxException { } } | URI uri = new URI ( protocol , userInfo , host , port , path , null , null ) ; StringBuilder url = new StringBuilder ( uri . toString ( ) ) ; if ( query != null && query . size ( ) > 0 ) { url . append ( "?" ) ; url . append ( query . toQueryString ( ) ) ; } if ( StringUtils . isNotBlank ( fragment ) ) { url . append ( "#" ) ; url . append ( fragment ) ; } return url . toString ( ) ; |
public class ConfigBase { /** * Set a property
* @ param list the list - possibly null
* @ param name of property
* @ param val of property
* @ return possibly newly created list */
@ SuppressWarnings ( "unchecked" ) public < L extends List > L setListProperty ( final L list , final String name , final String val ) { } } | removeProperty ( list , name ) ; return addListProperty ( list , name , val ) ; |
public class HldAccEntitiesProcessorNames { /** * < p > Get processor name for FOL delete . < / p >
* @ param pClass a Class
* @ return the processor simple name , or null if no FOL - delete processor applies */
protected final String getForFolDelete ( final Class < ? > pClass ) { } } | if ( PurchaseInvoiceServiceLine . class == pClass ) { return PrcPurchaseInvoiceServiceLineDelete . class . getSimpleName ( ) ; } else if ( SalesInvoiceServiceLine . class == pClass ) { return PrcSalesInvoiceServiceLineDelete . class . getSimpleName ( ) ; } else if ( IInvoiceLine . class . isAssignableFrom ( pClass ) ) { return null ; } else if ( WageTaxLine . class == pClass ) { return PrcWageTaxLineDelete . class . getSimpleName ( ) ; } else if ( InvItemTaxCategoryLine . class == pClass ) { return PrcInvItemTaxCategoryLineDelete . class . getSimpleName ( ) ; } else if ( AdditionCostLine . class == pClass ) { return PrcAdditionCostLineDelete . class . getSimpleName ( ) ; } else if ( WageLine . class == pClass ) { return PrcWageLineDelete . class . getSimpleName ( ) ; } else if ( Eattachment . class == pClass ) { return PrcEntityFfolDelete . class . getSimpleName ( ) ; } else if ( ADocTaxLine . class . isAssignableFrom ( pClass ) ) { return null ; } else { if ( this . hldAddEntitiesProcessorNames != null ) { String name = this . hldAddEntitiesProcessorNames . getForFolDelete ( pClass ) ; if ( name != null ) { return name ; } } return PrcEntityFolDelete . class . getSimpleName ( ) ; } |
public class HashUtil { /** * 对字符串进行散列 , 支持md5与sha1算法 . */
private static byte [ ] digest ( @ NotNull byte [ ] input , MessageDigest digest , byte [ ] salt , int iterations ) { } } | // 带盐
if ( salt != null ) { digest . update ( salt ) ; } // 第一次散列
byte [ ] result = digest . digest ( input ) ; // 如果迭代次数 > 1 , 进一步迭代散列
for ( int i = 1 ; i < iterations ; i ++ ) { digest . reset ( ) ; result = digest . digest ( result ) ; } return result ; |
public class Utils { /** * Copy the type parameters from a JvmOperation .
* < p > This function differs from { @ link XtendJvmModelInferrer # copyAndFixTypeParameters ( List ,
* org . eclipse . xtext . common . types . JvmTypeParameterDeclarator ) }
* and { @ link XtendJvmModelInferrer # copyTypeParameters ( List , org . eclipse . xtext . common . types . JvmTypeParameterDeclarator ) }
* in the fact that the type parameters were already generated and fixed . The current function supports generic types by
* cloning the type references with { @ link # cloneWithTypeParametersAndProxies ( JvmTypeReference , Iterable , Map , JvmTypeReferenceBuilder ,
* JvmTypesBuilder , TypeReferences , TypesFactory ) } .
* @ param fromOperation the operation from which the type parameters are copied .
* @ param toOperation the operation that will receives the new type parameters .
* @ param typeParameterBuilder the builder if type parameter .
* @ param typeBuilder the builder of type .
* @ param typeReferences the builder of type references .
* @ param jvmTypesFactory the factory of Jvm types .
* @ since 0.6 */
public static void copyTypeParametersFromJvmOperation ( JvmOperation fromOperation , JvmOperation toOperation , JvmTypeReferenceBuilder typeParameterBuilder , JvmTypesBuilder typeBuilder , TypeReferences typeReferences , TypesFactory jvmTypesFactory ) { } } | // Get the type parameter mapping that is a consequence of the super type extension within the container .
final Map < String , JvmTypeReference > superTypeParameterMapping = new HashMap < > ( ) ; Utils . getSuperTypeParameterMap ( toOperation . getDeclaringType ( ) , superTypeParameterMapping ) ; copyTypeParametersFromJvmOperation ( fromOperation . getTypeParameters ( ) , toOperation . getTypeParameters ( ) , superTypeParameterMapping , typeParameterBuilder , typeBuilder , typeReferences , jvmTypesFactory ) ; |
public class HostMessenger { /** * Given a hostid , return the hostname for it */
@ Override public String getHostnameForHostID ( int hostId ) { } } | if ( hostId == m_localHostId ) { return CoreUtils . getHostnameOrAddress ( ) ; } Iterator < ForeignHost > it = m_foreignHosts . get ( hostId ) . iterator ( ) ; if ( it . hasNext ( ) ) { ForeignHost fh = it . next ( ) ; return fh . hostname ( ) ; } return m_knownFailedHosts . get ( hostId ) != null ? m_knownFailedHosts . get ( hostId ) : "UNKNOWN" ; |
public class FluentFunctions { /** * Construct a FluentBiFunction from a CheckedBiFunction
* < pre >
* { @ code
* FluentFunctions . ofChecked ( this : : exceptionalFirstTime )
* . println ( )
* . retry ( 2,500)
* . applyHKT ( " hello " , " woo ! " )
* < / pre >
* @ param fn CheckedBiFunction
* @ return FluentBiFunction */
public static < T1 , T2 , R > FluentFunctions . FluentBiFunction < T1 , T2 , R > ofChecked ( final CheckedBiFunction < T1 , T2 , R > fn ) { } } | return FluentFunctions . of ( ExceptionSoftener . softenBiFunction ( fn ) ) ; |
public class SchemaManager { /** * Returns the table that has an index with the given name and schema . */
Table findUserTableForIndex ( Session session , String name , String schemaName ) { } } | Schema schema = ( Schema ) schemaMap . get ( schemaName ) ; HsqlName indexName = schema . indexLookup . getName ( name ) ; if ( indexName == null ) { return null ; } return findUserTable ( session , indexName . parent . name , schemaName ) ; |
public class TrainingsImpl { /** * Get the tags for a given project and iteration .
* @ param projectId The project id
* @ param getTagsOptionalParameter the object representing the optional parameters to be set before calling this API
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the List & lt ; Tag & gt ; object if successful . */
public List < Tag > getTags ( UUID projectId , GetTagsOptionalParameter getTagsOptionalParameter ) { } } | return getTagsWithServiceResponseAsync ( projectId , getTagsOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class SMSChannelRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( SMSChannelRequest sMSChannelRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( sMSChannelRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( sMSChannelRequest . getEnabled ( ) , ENABLED_BINDING ) ; protocolMarshaller . marshall ( sMSChannelRequest . getSenderId ( ) , SENDERID_BINDING ) ; protocolMarshaller . marshall ( sMSChannelRequest . getShortCode ( ) , SHORTCODE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class KAMStoreImpl { /** * { @ inheritDoc } */
@ Override public List < KamNode > getKamNodes ( final Kam k , final SkinnyUUID uuid ) { } } | if ( uuid == null ) throw new InvalidArgument ( "uuid" , uuid ) ; if ( ! exists ( k ) ) return null ; List < Integer > ids ; try { ids = kamStoreDao ( k . getKamInfo ( ) ) . getKamNodeCandidates ( uuid ) ; List < KamNode > kamNodeList = new ArrayList < KamNode > ( ) ; for ( Integer kamNodeId : ids ) { kamNodeList . add ( k . findNode ( kamNodeId ) ) ; } return kamNodeList ; } catch ( SQLException e ) { final String fmt = "error getting KAM nodes for %s" ; final String msg = format ( fmt , k . getKamInfo ( ) . getName ( ) ) ; throw new KAMStoreException ( msg , e ) ; } |
public class AmqpChannel { /** * This method unbinds a queue from an exchange .
* @ param queue
* @ param exchange
* @ param routingKey
* @ param arguments
* @ return AmqpChannel */
public AmqpChannel unbindQueue ( String queue , String exchange , String routingKey , AmqpArguments arguments ) { } } | Object [ ] args = { 0 , queue , exchange , routingKey , arguments } ; WrappedByteBuffer bodyArg = null ; HashMap < String , Object > headersArg = null ; String methodName = "unbindQueue" ; String methodId = "50" + "50" ; AmqpMethod amqpMethod = MethodLookup . LookupMethod ( methodId ) ; Object [ ] methodArguments = { this , amqpMethod , this . id , args , bodyArg , headersArg } ; asyncClient . enqueueAction ( methodName , "channelWrite" , methodArguments , null , null ) ; return this ; |
public class QueryInfo { /** * Sets the select fields .
* @ param selectFields
* the collection of < code > FieldInfo < / code > objects to set . */
public void setSelectFields ( java . util . Collection < FieldInfo > selectFields ) { } } | if ( selectFields == null ) { this . selectFields = null ; return ; } this . selectFields = new com . amazonaws . internal . SdkInternalList < FieldInfo > ( selectFields ) ; |
public class EventClient { /** * Sends a synchronous create event request to the API .
* @ param event an instance of { @ link Event } that will be turned into a request
* @ return event ID from the server
* @ throws ExecutionException indicates an error in the HTTP backend
* @ throws InterruptedException indicates an interruption during the HTTP operation
* @ throws IOException indicates an error from the API response */
public String createEvent ( Event event ) throws ExecutionException , InterruptedException , IOException { } } | return createEvent ( createEventAsFuture ( event ) ) ; |
public class CompareFileFilter { /** * Set the field or file that owns this listener .
* Besides inherited , this method closes the owner record .
* @ param owner My owner . */
public void setOwner ( ListenerOwner owner ) { } } | super . setOwner ( owner ) ; if ( owner == null ) return ; if ( ( m_strFieldNameToCheck != null ) && ( m_strFieldNameToCheck . length ( ) > 0 ) ) if ( ( m_fldToCheck == null ) || ( m_fldToCheck . getFieldName ( ) == null ) || ( m_fldToCheck . getFieldName ( ) . length ( ) == 0 ) ) m_fldToCheck = this . getOwner ( ) . getField ( m_strFieldNameToCheck ) ; // If you have the fieldname , but not the field , get the field .
if ( m_fldToCompare != null ) if ( m_fldToCompare . getRecord ( ) != this . getOwner ( ) ) m_fldToCompare . addListener ( new FieldRemoveBOnCloseHandler ( this ) ) ; if ( m_fldToCheck != null ) if ( m_fldToCheck . getRecord ( ) != this . getOwner ( ) ) // If field is not in this file , remember to remove it
m_fldToCheck . addListener ( new FieldRemoveBOnCloseHandler ( this ) ) ; // x this . getOwner ( ) . close ( ) ; / / Must requery after setting dependent fields ! |
public class WorkerPoolImpl { /** * Handle an exception that was thrown from inside { @ link # poll ( ) } .
* @ param curQueue the name of the queue that was being processed when the exception was thrown
* @ param ex the exception that was thrown */
protected void recoverFromException ( final String curQueue , final Exception ex ) { } } | final RecoveryStrategy recoveryStrategy = this . exceptionHandlerRef . get ( ) . onException ( this , ex , curQueue ) ; switch ( recoveryStrategy ) { case RECONNECT : if ( ex instanceof JedisNoScriptException ) { LOG . info ( "Got JedisNoScriptException while reconnecting, reloading Redis scripts" ) ; loadRedisScripts ( ) ; } else { LOG . info ( "Waiting " + RECONNECT_SLEEP_TIME + "ms for pool to reconnect to redis" , ex ) ; try { Thread . sleep ( RECONNECT_SLEEP_TIME ) ; } catch ( InterruptedException e ) { } } break ; case TERMINATE : LOG . warn ( "Terminating in response to exception" , ex ) ; end ( false ) ; break ; case PROCEED : this . listenerDelegate . fireEvent ( WORKER_ERROR , this , curQueue , null , null , null , ex ) ; break ; default : LOG . error ( "Unknown RecoveryStrategy: " + recoveryStrategy + " while attempting to recover from the following exception; worker proceeding..." , ex ) ; break ; } |
public class CmsLinkRewriter { /** * Checks if a path belongs to one of the sources . < p >
* @ param path a root path
* @ return true if the path belongs to the sources */
protected boolean isInSources ( String path ) { } } | for ( CmsPair < String , String > sourceTargetPair : m_sourceTargetPairs ) { String source = sourceTargetPair . getFirst ( ) ; if ( CmsStringUtil . joinPaths ( path , "/" ) . startsWith ( CmsStringUtil . joinPaths ( source , "/" ) ) ) { return true ; } } return false ; |
public class DoubleClickCrypto { /** * Creates the initialization vector from component { @ code ( timestamp , serverId ) } fields .
* @ param timestamp Timestamp subfield . Notice that Date is not ideal for this because it ' s
* limited to millisecond precision , which leaves some bits unused in the init vector
* @ param serverId Server ID subfield ( whatever a server uses as a public ID , e . g . its IPv4)
* @ return initialization vector
* @ see # createInitVector ( long , long ) */
public byte [ ] createInitVector ( @ Nullable Date timestamp , long serverId ) { } } | return createInitVector ( timestamp == null ? 0L : millisToSecsAndMicros ( timestamp . getTime ( ) ) , serverId ) ; |
public class FrameDataflowAnalysis { /** * Merge one frame into another .
* @ param other
* the frame to merge with the result
* @ param result
* the result frame , which is modified to be the merge of the two
* frames */
protected void mergeInto ( FrameType other , FrameType result ) throws DataflowAnalysisException { } } | // Handle if result Frame or the other Frame is the special " TOP " value .
if ( result . isTop ( ) ) { // Result is the identity element , so copy the other Frame
result . copyFrom ( other ) ; return ; } else if ( other . isTop ( ) ) { // Other Frame is the identity element , so result stays the same
return ; } // Handle if result Frame or the other Frame is the special " BOTTOM "
// value .
if ( result . isBottom ( ) ) { // Result is the bottom element , so it stays that way
return ; } else if ( other . isBottom ( ) ) { // Other Frame is the bottom element , so result becomes the bottom
// element too
result . setBottom ( ) ; return ; } // If the number of slots in the Frames differs ,
// then the result is the special " BOTTOM " value .
if ( result . getNumSlots ( ) != other . getNumSlots ( ) ) { result . setBottom ( ) ; return ; } // Usual case : ordinary Frames consisting of the same number of values .
// Merge each value in the two slot lists element - wise .
for ( int i = 0 ; i < result . getNumSlots ( ) ; ++ i ) { mergeValues ( other , result , i ) ; } |
public class SentinelServersConfig { /** * Add Redis Sentinel node address in host : port format . Multiple nodes at once could be added .
* @ param addresses of Redis
* @ return config */
public SentinelServersConfig addSentinelAddress ( String ... addresses ) { } } | for ( String address : addresses ) { sentinelAddresses . add ( URIBuilder . create ( address ) ) ; } return this ; |
public class CFMXCompat { /** * returns true if the passed value is empty or is CFMX _ COMPAT */
public static boolean isCfmxCompat ( String algorithm ) { } } | if ( StringUtil . isEmpty ( algorithm , true ) ) return true ; return algorithm . equalsIgnoreCase ( CFMXCompat . ALGORITHM_NAME ) ; |
public class ProjectResourceSpreadType { /** * Gets the value of the period property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the period property .
* For example , to add a new item , do as follows :
* < pre >
* getPeriod ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link ProjectResourceSpreadType . Period } */
public List < ProjectResourceSpreadType . Period > getPeriod ( ) { } } | if ( period == null ) { period = new ArrayList < ProjectResourceSpreadType . Period > ( ) ; } return this . period ; |
public class AmazonIdentityManagementClient { /** * Replaces the existing list of server certificate thumbprints associated with an OpenID Connect ( OIDC ) provider
* resource object with a new list of thumbprints .
* The list that you pass with this operation completely replaces the existing list of thumbprints . ( The lists are
* not merged . )
* Typically , you need to update a thumbprint only when the identity provider ' s certificate changes , which occurs
* rarely . However , if the provider ' s certificate < i > does < / i > change , any attempt to assume an IAM role that
* specifies the OIDC provider as a principal fails until the certificate thumbprint is updated .
* < note >
* Trust for the OIDC provider is derived from the provider ' s certificate and is validated by the thumbprint .
* Therefore , it is best to limit access to the < code > UpdateOpenIDConnectProviderThumbprint < / code > operation to
* highly privileged users .
* < / note >
* @ param updateOpenIDConnectProviderThumbprintRequest
* @ return Result of the UpdateOpenIDConnectProviderThumbprint operation returned by the service .
* @ throws InvalidInputException
* The request was rejected because an invalid or out - of - range value was supplied for an input parameter .
* @ throws NoSuchEntityException
* The request was rejected because it referenced a resource entity that does not exist . The error message
* describes the resource .
* @ throws ServiceFailureException
* The request processing has failed because of an unknown error , exception or failure .
* @ sample AmazonIdentityManagement . UpdateOpenIDConnectProviderThumbprint
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / UpdateOpenIDConnectProviderThumbprint "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public UpdateOpenIDConnectProviderThumbprintResult updateOpenIDConnectProviderThumbprint ( UpdateOpenIDConnectProviderThumbprintRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeUpdateOpenIDConnectProviderThumbprint ( request ) ; |
public class AnnotationsUtil { /** * Check if the annotation is present and if not throws an exception ,
* this is just an overload for more clear naming .
* @ param annotatedType The source type to check the annotation on
* @ param annotationClass The annotation to look for
* @ param < T > The annotation subtype
* @ return The annotation that was requested
* @ throws ODataSystemException If unable to find the annotation or nullpointer in case null source was specified */
public static < T extends Annotation > T checkAnnotationPresent ( AnnotatedElement annotatedType , Class < T > annotationClass ) { } } | return getAnnotation ( annotatedType , annotationClass ) ; |
public class TranslateSnippetsBeta { /** * Translates a given text to a target language .
* @ param projectId - Id of the project .
* @ param location - location name .
* @ param text - Text for translation .
* @ param sourceLanguageCode - Language code of text . e . g . " en "
* @ param targetLanguageCode - Language code for translation . e . g . " sr " */
static TranslateTextResponse translateText ( String projectId , String location , String text , String sourceLanguageCode , String targetLanguageCode ) { } } | try ( TranslationServiceClient translationServiceClient = TranslationServiceClient . create ( ) ) { LocationName locationName = LocationName . newBuilder ( ) . setProject ( projectId ) . setLocation ( location ) . build ( ) ; TranslateTextRequest translateTextRequest = TranslateTextRequest . newBuilder ( ) . setParent ( locationName . toString ( ) ) . setMimeType ( "text/plain" ) . setSourceLanguageCode ( sourceLanguageCode ) . setTargetLanguageCode ( targetLanguageCode ) . addContents ( text ) . build ( ) ; // Call the API
TranslateTextResponse response = translationServiceClient . translateText ( translateTextRequest ) ; System . out . format ( "Translated Text: %s" , response . getTranslationsList ( ) . get ( 0 ) . getTranslatedText ( ) ) ; return response ; } catch ( Exception e ) { throw new RuntimeException ( "Couldn't create client." , e ) ; } |
public class SemanticWarning { /** * { @ inheritDoc } */
@ Override public String getUserFacingMessage ( ) { } } | final StringBuilder bldr = new StringBuilder ( ) ; if ( signatureStatus != null && signatureStatus . size ( ) > 1 ) { bldr . append ( "SEMANTIC WARNINGS" ) ; } else { bldr . append ( "SEMANTIC WARNING" ) ; } final String name = getName ( ) ; if ( name != null ) { bldr . append ( " in " ) ; bldr . append ( name ) ; } bldr . append ( "\n\treason: " ) ; final String msg = getMessage ( ) ; if ( msg != null ) { bldr . append ( msg ) ; } else { bldr . append ( "Unknown" ) ; } bldr . append ( "\n" ) ; if ( signatureStatus != null ) { // Signature status is non - null , suppliedSignature is as well
bldr . append ( "\tsignature: " ) ; bldr . append ( suppliedSignature ) ; bldr . append ( "\n" ) ; bldr . append ( "\tfunction signatures: " ) ; bldr . append ( signatureStatus . size ( ) ) ; bldr . append ( "\n" ) ; final Set < Entry < Signature , SemanticStatus > > entrySet = signatureStatus . entrySet ( ) ; for ( final Entry < Signature , SemanticStatus > entry : entrySet ) { Signature sig = entry . getKey ( ) ; SemanticStatus status = entry . getValue ( ) ; bldr . append ( "\t\t" ) ; bldr . append ( status ) ; bldr . append ( " for signature " ) ; bldr . append ( sig ) ; bldr . append ( "\n" ) ; } } return bldr . toString ( ) ; |
public class AbstractCoalescingBufferQueue { /** * Compose { @ code cumulation } and { @ code next } into a new { @ link ByteBufAllocator # ioBuffer ( ) } .
* @ param alloc The allocator to use to allocate the new buffer .
* @ param cumulation The current cumulation .
* @ param next The next buffer .
* @ return The result of { @ code cumulation + next } . */
protected final ByteBuf copyAndCompose ( ByteBufAllocator alloc , ByteBuf cumulation , ByteBuf next ) { } } | ByteBuf newCumulation = alloc . ioBuffer ( cumulation . readableBytes ( ) + next . readableBytes ( ) ) ; try { newCumulation . writeBytes ( cumulation ) . writeBytes ( next ) ; } catch ( Throwable cause ) { newCumulation . release ( ) ; safeRelease ( next ) ; throwException ( cause ) ; } cumulation . release ( ) ; next . release ( ) ; return newCumulation ; |
public class XMLSerializer { /** * Printable .
* @ param ch the ch
* @ return the string */
protected static final String printable ( char ch ) { } } | StringBuffer retval = new StringBuffer ( ) ; addPrintable ( retval , ch ) ; return retval . toString ( ) ; |
public class VariableEvaluator { /** * Returns the raw value object of the variable , or null if the property
* does not exist .
* @ param variable the variable name */
private Object getPropertyObject ( String variable , EvaluationContext context , boolean ignoreWarnings , boolean useEnvironment ) throws ConfigEvaluatorException { } } | Object realValue = null ; // checked if we already looked up the value
if ( context . containsValue ( variable ) ) { // get it from cache
realValue = context . getValue ( variable ) ; } else if ( XMLConfigConstants . CFG_SERVICE_PID . equals ( variable ) ) { try { realValue = configEvaluator . getPid ( context . getConfigElement ( ) . getConfigID ( ) ) ; context . putValue ( XMLConfigConstants . CFG_SERVICE_PID , realValue ) ; } catch ( ConfigNotFoundException ex ) { throw new ConfigEvaluatorException ( "Could not obtain PID for configID" , ex ) ; } } else { // evaluate the variable
context . push ( variable ) ; realValue = lookupVariableFromRegistry ( variable ) ; if ( realValue == null ) { // Try checking the properties . This will pick up already evaluated attributes , including flattened config
realValue = context . getProperties ( ) . get ( variable ) ; if ( realValue == null ) { // check if this is an metatype attribute
ExtendedAttributeDefinition attributeDef = context . getAttributeDefinition ( variable ) ; if ( attributeDef != null ) { String currentAttribute = context . getAttributeName ( ) ; // Get the nested info here , then set it later so that evaluating the
// metatype attribute here doesn ' t affect it
Set < NestedInfo > nestedInfo = context . getNestedInfo ( ) ; String flatPrefix = "" ; try { realValue = configEvaluator . evaluateMetaTypeAttribute ( variable , context , attributeDef , flatPrefix , ignoreWarnings ) ; } finally { context . setAttributeName ( currentAttribute ) ; context . setNestedInfo ( nestedInfo ) ; } } else { // check if this is just an attribute
ConfigElement configElement = context . getConfigElement ( ) ; Object rawValue = configElement . getAttribute ( variable ) ; if ( rawValue != null ) { String currentAttribute = context . getAttributeName ( ) ; Set < NestedInfo > nestedInfo = context . getNestedInfo ( ) ; try { realValue = configEvaluator . evaluateSimpleAttribute ( variable , rawValue , context , "" , ignoreWarnings ) ; } finally { context . setAttributeName ( currentAttribute ) ; context . setNestedInfo ( nestedInfo ) ; } } } } if ( realValue == null ) { // Check if this variable points to an unevaluated flattened config
Map < String , ExtendedAttributeDefinition > attributeMap = context . getAttributeMap ( ) ; if ( attributeMap != null ) { for ( Map . Entry < String , ExtendedAttributeDefinition > entry : attributeMap . entrySet ( ) ) { if ( ! context . isProcessed ( entry . getKey ( ) ) && entry . getValue ( ) . isFlat ( ) ) { try { configEvaluator . evaluateMetaTypeAttribute ( entry . getKey ( ) , context , entry . getValue ( ) , "" , true ) ; } catch ( ConfigEvaluatorException ex ) { // Ignore - - errors should be generated during main line processing
} } } // Try again now that everything should be evaluated
realValue = context . getProperties ( ) . get ( variable ) ; } } // Try to get an environment variable ( env . MYVAR )
if ( realValue == null && useEnvironment ) { realValue = lookupEnvironmentVariable ( variable ) ; } // Try to get a default value ( < variable name = " var " default = " defaultValue " / > )
if ( realValue == null ) { realValue = lookupDefaultVariable ( variable ) ; } if ( realValue == null ) { // If the value is null , add it to the context so that we don ' t try to evaluate it again .
// If the value is not null here , this is a variable that points to a configuration attribute ,
// so we don ' t want to add it to the variable registry .
context . addDefinedVariable ( variable , null ) ; } } else { // Only add the variable to the context if this is a user defined variable
context . addDefinedVariable ( variable , realValue ) ; } context . pop ( ) ; context . putValue ( variable , realValue ) ; } return realValue ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.