signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LofCalculator { /** * 学習データセットに対象点を追加し 、 学習データセットの構造を更新する 。 < br > * データ保持数が最大値に達していた場合は学習データセットが保持しているもっとも古い点の削除を行い 、 削除したデータのIdを返す 。 * @ param max データ保持数最大値 * @ param addedPoint 対象点 * @ param dataSet 学習データセット * @ return 削除したデータのId 。 削除しなかった場合はnull */ public static String addPointToDataSet ( int max , LofPoint addedPoint , LofDataSet dataSet ) { } }
// データ数が最大に達している場合には古い方からデータの削除を行う boolean dateDelete = false ; if ( max <= dataSet . getDataIdList ( ) . size ( ) ) { dateDelete = true ; } // 削除フラグがONの場合は古いデータから削除を行う 。 String deleteId = null ; if ( dateDelete ) { deleteId = dataSet . getDataIdList ( ) . get ( 0 ) ; dataSet . deleteData ( deleteId ) ; } dataSet . addData ( addedPoint ) ; return deleteId ;
public class CommandArgs { /** * Add an 64 - bit integer ( long ) argument . * @ param n the argument . * @ return the command args . */ public CommandArgs < K , V > add ( long n ) { } }
singularArguments . add ( IntegerArgument . of ( n ) ) ; return this ;
public class RoundedBorder { /** * { @ inheritDoc } */ public Rectangle getInteriorRectangle ( Component c , int x , int y , int width , int height ) { } }
return RoundedBorder . getInteriorRectangle ( c , this , x , y , width , height ) ;
public class KeePassDatabase { /** * Retrieves a KeePassDatabase instance . The instance returned is based on * the given database file and tries to parse the database header of it . * @ param keePassDatabaseFile * a KeePass database file , must not be NULL * @ return a KeePassDatabase */ public static KeePassDatabase getInstance ( File keePassDatabaseFile ) { } }
if ( keePassDatabaseFile == null ) { throw new IllegalArgumentException ( "You must provide a valid KeePass database file." ) ; } InputStream keePassDatabaseStream = null ; try { keePassDatabaseStream = new FileInputStream ( keePassDatabaseFile ) ; return getInstance ( keePassDatabaseStream ) ; } catch ( FileNotFoundException e ) { throw new IllegalArgumentException ( "The KeePass database file could not be found. You must provide a valid KeePass database file." , e ) ; } finally { if ( keePassDatabaseStream != null ) { try { keePassDatabaseStream . close ( ) ; } catch ( IOException e ) { // Ignore } } }
public class CompletableEventLatchSupport { /** * Notification callback method accepting a connection error . Triggers emission if the gate is open and the current call to * this method is the last expected notification . */ public final void accept ( Throwable throwable ) { } }
if ( GATE_UPDATER . get ( this ) == GATE_CLOSED ) { onDrop ( throwable ) ; return ; } onError ( throwable ) ; onNotification ( ) ;
public class GetShippingLabelRequest { /** * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setJobIds ( java . util . Collection ) } or { @ link # withJobIds ( java . util . Collection ) } if you want to override the * existing values . * @ param jobIds * @ return Returns a reference to this object so that method calls can be chained together . */ public GetShippingLabelRequest withJobIds ( String ... jobIds ) { } }
if ( this . jobIds == null ) { setJobIds ( new com . amazonaws . internal . SdkInternalList < String > ( jobIds . length ) ) ; } for ( String ele : jobIds ) { this . jobIds . add ( ele ) ; } return this ;
public class Interval { /** * Returns order of another interval compared to this one * @ param other Interval to compare with * @ return - 1 if this interval is before the other interval , 1 if this interval is after * 0 otherwise ( may indicate the two intervals are same or not comparable ) */ public int compareIntervalOrder ( Interval < E > other ) { } }
int flags = getRelationFlags ( other ) ; if ( checkFlagExclusiveSet ( flags , REL_FLAGS_INTERVAL_BEFORE , REL_FLAGS_INTERVAL_UNKNOWN ) ) { return - 1 ; } else if ( checkFlagExclusiveSet ( flags , REL_FLAGS_INTERVAL_AFTER , REL_FLAGS_INTERVAL_UNKNOWN ) ) { return 1 ; } else { return 0 ; }
public class LabeledFormComponentPanel { /** * Factory method for creating the new { @ link ComponentFeedbackPanel } . This method is invoked in * the constructor from the derived classes and can be overridden so users can provide their own * version of a new { @ link ComponentFeedbackPanel } . * @ param id * the id * @ param filter * the filter * @ return the new { @ link ComponentFeedbackPanel } */ protected ComponentFeedbackPanel newComponentFeedbackPanel ( final String id , final Component filter ) { } }
return ComponentFactory . newComponentFeedbackPanel ( id , filter ) ;
public class ABOD { /** * Run ABOD on the data set . * @ param relation Relation to process * @ return Outlier detection result */ public OutlierResult run ( Database db , Relation < V > relation ) { } }
ArrayDBIDs ids = DBIDUtil . ensureArray ( relation . getDBIDs ( ) ) ; // Build a kernel matrix , to make O ( n ^ 3 ) slightly less bad . SimilarityQuery < V > sq = db . getSimilarityQuery ( relation , kernelFunction ) ; KernelMatrix kernelMatrix = new KernelMatrix ( sq , relation , ids ) ; WritableDoubleDataStore abodvalues = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_STATIC ) ; DoubleMinMax minmaxabod = new DoubleMinMax ( ) ; MeanVariance s = new MeanVariance ( ) ; DBIDArrayIter pA = ids . iter ( ) , pB = ids . iter ( ) , pC = ids . iter ( ) ; for ( ; pA . valid ( ) ; pA . advance ( ) ) { final double abof = computeABOF ( kernelMatrix , pA , pB , pC , s ) ; minmaxabod . put ( abof ) ; abodvalues . putDouble ( pA , abof ) ; } // Build result representation . DoubleRelation scoreResult = new MaterializedDoubleRelation ( "Angle-Based Outlier Degree" , "abod-outlier" , abodvalues , relation . getDBIDs ( ) ) ; OutlierScoreMeta scoreMeta = new InvertedOutlierScoreMeta ( minmaxabod . getMin ( ) , minmaxabod . getMax ( ) , 0.0 , Double . POSITIVE_INFINITY ) ; return new OutlierResult ( scoreMeta , scoreResult ) ;
public class LabAccountsInner { /** * List lab accounts in a resource group . * @ param resourceGroupName The name of the resource group . * @ param expand Specify the $ expand query . Example : ' properties ( $ expand = sizeConfiguration ) ' * @ param filter The filter to apply to the operation . * @ param top The maximum number of resources to return from the operation . * @ param orderby The ordering expression for the results , using OData notation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; LabAccountInner & gt ; object */ public Observable < Page < LabAccountInner > > listByResourceGroupAsync ( final String resourceGroupName , final String expand , final String filter , final Integer top , final String orderby ) { } }
return listByResourceGroupWithServiceResponseAsync ( resourceGroupName , expand , filter , top , orderby ) . map ( new Func1 < ServiceResponse < Page < LabAccountInner > > , Page < LabAccountInner > > ( ) { @ Override public Page < LabAccountInner > call ( ServiceResponse < Page < LabAccountInner > > response ) { return response . body ( ) ; } } ) ;
public class GetLifecyclePoliciesRequest { /** * The resource type . * @ param resourceTypes * The resource type . * @ see ResourceTypeValues */ public void setResourceTypes ( java . util . Collection < String > resourceTypes ) { } }
if ( resourceTypes == null ) { this . resourceTypes = null ; return ; } this . resourceTypes = new java . util . ArrayList < String > ( resourceTypes ) ;
public class SVGMorph { /** * Set the " time " index for this morph . This is given in terms of diagrams , so * 0.5f would give you the position half way between the first and second diagrams . * @ param time The time index to represent on this diagrams */ public void setMorphTime ( float time ) { } }
for ( int i = 0 ; i < figures . size ( ) ; i ++ ) { Figure figure = ( Figure ) figures . get ( i ) ; MorphShape shape = ( MorphShape ) figure . getShape ( ) ; shape . setMorphTime ( time ) ; }
public class CmsObject { /** * Returns a list of all siblings of the specified resource , * the specified resource being always part of the result set . < p > * @ param resource the resource * @ param filter a resource filter * @ return a list of < code > { @ link CmsResource } < / code > s that * are siblings to the specified resource , * including the specified resource itself . * @ throws CmsException if something goes wrong */ public List < CmsResource > readSiblings ( CmsResource resource , CmsResourceFilter filter ) throws CmsException { } }
return m_securityManager . readSiblings ( m_context , resource , filter ) ;
public class RuntimeHttpUtils { /** * Returns an URI for the given endpoint . * Prefixes the protocol if the endpoint given does not have it . * @ throws IllegalArgumentException if the inputs are null . */ public static URI toUri ( String endpoint , Protocol protocol ) { } }
if ( endpoint == null ) { throw new IllegalArgumentException ( "endpoint cannot be null" ) ; } /* * If the endpoint doesn ' t explicitly specify a protocol to use , then * we ' ll defer to the default protocol specified in the client * configuration . */ if ( ! endpoint . contains ( "://" ) ) { endpoint = protocol . toString ( ) + "://" + endpoint ; } try { return new URI ( endpoint ) ; } catch ( URISyntaxException e ) { throw new IllegalArgumentException ( e ) ; }
public class ModbusSerialTransport { /** * clearInput - Clear the input if characters are found in the input stream . * @ throws IOException If a problem with the port */ void clearInput ( ) throws IOException { } }
if ( commPort . bytesAvailable ( ) > 0 ) { int len = commPort . bytesAvailable ( ) ; byte buf [ ] = new byte [ len ] ; readBytes ( buf , len ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Clear input: {}" , ModbusUtil . toHex ( buf , 0 , len ) ) ; } }
public class MaxiCode { /** * Returns the primary message codewords for mode 3. * @ param postcode the postal code * @ param country the country code * @ param service the service code * @ return the primary message , as codewords */ private static int [ ] getMode3PrimaryCodewords ( String postcode , int country , int service ) { } }
int [ ] postcodeNums = new int [ postcode . length ( ) ] ; postcode = postcode . toUpperCase ( ) ; for ( int i = 0 ; i < postcodeNums . length ; i ++ ) { postcodeNums [ i ] = postcode . charAt ( i ) ; if ( postcode . charAt ( i ) >= 'A' && postcode . charAt ( i ) <= 'Z' ) { // ( Capital ) letters shifted to Code Set A values postcodeNums [ i ] -= 64 ; } if ( postcodeNums [ i ] == 27 || postcodeNums [ i ] == 31 || postcodeNums [ i ] == 33 || postcodeNums [ i ] >= 59 ) { // Not a valid postal code character , use space instead postcodeNums [ i ] = 32 ; } // Input characters lower than 27 ( NUL - SUB ) in postal code are interpreted as capital // letters in Code Set A ( e . g . LF becomes ' J ' ) } int [ ] primary = new int [ 10 ] ; primary [ 0 ] = ( ( postcodeNums [ 5 ] & 0x03 ) << 4 ) | 3 ; primary [ 1 ] = ( ( postcodeNums [ 4 ] & 0x03 ) << 4 ) | ( ( postcodeNums [ 5 ] & 0x3c ) >> 2 ) ; primary [ 2 ] = ( ( postcodeNums [ 3 ] & 0x03 ) << 4 ) | ( ( postcodeNums [ 4 ] & 0x3c ) >> 2 ) ; primary [ 3 ] = ( ( postcodeNums [ 2 ] & 0x03 ) << 4 ) | ( ( postcodeNums [ 3 ] & 0x3c ) >> 2 ) ; primary [ 4 ] = ( ( postcodeNums [ 1 ] & 0x03 ) << 4 ) | ( ( postcodeNums [ 2 ] & 0x3c ) >> 2 ) ; primary [ 5 ] = ( ( postcodeNums [ 0 ] & 0x03 ) << 4 ) | ( ( postcodeNums [ 1 ] & 0x3c ) >> 2 ) ; primary [ 6 ] = ( ( postcodeNums [ 0 ] & 0x3c ) >> 2 ) | ( ( country & 0x3 ) << 4 ) ; primary [ 7 ] = ( country & 0xfc ) >> 2 ; primary [ 8 ] = ( ( country & 0x300 ) >> 8 ) | ( ( service & 0xf ) << 2 ) ; primary [ 9 ] = ( ( service & 0x3f0 ) >> 4 ) ; return primary ;
public class Instance { /** * < pre > * Zone where the virtual machine is located . Only applicable for instances * in App Engine flexible environment . * & # 64 ; OutputOnly * < / pre > * < code > string vm _ zone _ name = 6 ; < / code > */ public com . google . protobuf . ByteString getVmZoneNameBytes ( ) { } }
java . lang . Object ref = vmZoneName_ ; if ( ref instanceof java . lang . String ) { com . google . protobuf . ByteString b = com . google . protobuf . ByteString . copyFromUtf8 ( ( java . lang . String ) ref ) ; vmZoneName_ = b ; return b ; } else { return ( com . google . protobuf . ByteString ) ref ; }
public class CacheNotifierImpl { /** * Gets a suitable indexing provider for the given indexed filter . * @ param indexedFilter the filter * @ return the FilterIndexingServiceProvider that supports the given IndexedFilter or { @ code null } if none was found */ private FilterIndexingServiceProvider findIndexingServiceProvider ( IndexedFilter indexedFilter ) { } }
if ( filterIndexingServiceProviders != null ) { for ( FilterIndexingServiceProvider provider : filterIndexingServiceProviders ) { if ( provider . supportsFilter ( indexedFilter ) ) { return provider ; } } } log . noFilterIndexingServiceProviderFound ( indexedFilter . getClass ( ) . getName ( ) ) ; return null ;
public class ContextManager { /** * Remove the context with the given ID from the stack . * @ throws IllegalStateException if the given ID does not refer to the top of stack . */ private void removeContext ( final String contextID ) { } }
synchronized ( this . contextStack ) { if ( ! contextID . equals ( this . contextStack . peek ( ) . getIdentifier ( ) ) ) { throw new IllegalStateException ( "Trying to close context with id `" + contextID + "`. But the top context has id `" + this . contextStack . peek ( ) . getIdentifier ( ) + "`" ) ; } this . contextStack . peek ( ) . close ( ) ; if ( this . contextStack . size ( ) > 1 ) { /* We did not close the root context . Therefore , we need to inform the * driver explicitly that this context is closed . The root context notification * is implicit in the Evaluator close / done notification . */ this . heartBeatManager . sendHeartbeat ( ) ; // Ensure Driver gets notified of context DONE state } this . contextStack . pop ( ) ; /* * At this moment , the Evaluator is actually idle and has some time till the Driver sends it additional work . * Also , a potentially large object graph just became orphaned : all the objects instantiated by the context * and service injectors can now be garbage collected . So GC call is justified . */ System . gc ( ) ; }
public class PrcPurchaseReturnLineSave { /** * < p > Setter for utlInvLine . < / p > * @ param pUtlInvLine reference */ public final void setUtlInvLine ( final UtlInvLine < RS , PurchaseReturn , PurchaseReturnLine , PurchaseReturnTaxLine , PurchaseReturnGoodsTaxLine > pUtlInvLine ) { } }
this . utlInvLine = pUtlInvLine ;
public class RadialTextsView { /** * Draw the 12 text values at the positions specified by the textGrid parameters . */ private void drawTexts ( Canvas canvas , float textSize , Typeface typeface , String [ ] texts , float [ ] textGridWidths , float [ ] textGridHeights ) { } }
mPaint . setTextSize ( textSize ) ; mPaint . setTypeface ( typeface ) ; Paint [ ] textPaints = assignTextColors ( texts ) ; canvas . drawText ( texts [ 0 ] , textGridWidths [ 3 ] , textGridHeights [ 0 ] , textPaints [ 0 ] ) ; canvas . drawText ( texts [ 1 ] , textGridWidths [ 4 ] , textGridHeights [ 1 ] , textPaints [ 1 ] ) ; canvas . drawText ( texts [ 2 ] , textGridWidths [ 5 ] , textGridHeights [ 2 ] , textPaints [ 2 ] ) ; canvas . drawText ( texts [ 3 ] , textGridWidths [ 6 ] , textGridHeights [ 3 ] , textPaints [ 3 ] ) ; canvas . drawText ( texts [ 4 ] , textGridWidths [ 5 ] , textGridHeights [ 4 ] , textPaints [ 4 ] ) ; canvas . drawText ( texts [ 5 ] , textGridWidths [ 4 ] , textGridHeights [ 5 ] , textPaints [ 5 ] ) ; canvas . drawText ( texts [ 6 ] , textGridWidths [ 3 ] , textGridHeights [ 6 ] , textPaints [ 6 ] ) ; canvas . drawText ( texts [ 7 ] , textGridWidths [ 2 ] , textGridHeights [ 5 ] , textPaints [ 7 ] ) ; canvas . drawText ( texts [ 8 ] , textGridWidths [ 1 ] , textGridHeights [ 4 ] , textPaints [ 8 ] ) ; canvas . drawText ( texts [ 9 ] , textGridWidths [ 0 ] , textGridHeights [ 3 ] , textPaints [ 9 ] ) ; canvas . drawText ( texts [ 10 ] , textGridWidths [ 1 ] , textGridHeights [ 2 ] , textPaints [ 10 ] ) ; canvas . drawText ( texts [ 11 ] , textGridWidths [ 2 ] , textGridHeights [ 1 ] , textPaints [ 11 ] ) ;
public class OperationsInner { /** * Exposing Available Operations . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; OperationInner & gt ; object */ public Observable < Page < OperationInner > > listNextAsync ( final String nextPageLink ) { } }
return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < OperationInner > > , Page < OperationInner > > ( ) { @ Override public Page < OperationInner > call ( ServiceResponse < Page < OperationInner > > response ) { return response . body ( ) ; } } ) ;
public class ShardWriter { /** * and then delete the temp dir from the local FS */ private void moveFromTempToPerm ( ) throws IOException { } }
try { FileStatus [ ] fileStatus = localFs . listStatus ( temp , LuceneIndexFileNameFilter . getFilter ( ) ) ; Path segmentsPath = null ; Path segmentsGenPath = null ; // move the files created in temp dir except segments _ N and segments . gen for ( int i = 0 ; i < fileStatus . length ; i ++ ) { Path path = fileStatus [ i ] . getPath ( ) ; String name = path . getName ( ) ; if ( LuceneUtil . isSegmentsGenFile ( name ) ) { assert ( segmentsGenPath == null ) ; segmentsGenPath = path ; } else if ( LuceneUtil . isSegmentsFile ( name ) ) { assert ( segmentsPath == null ) ; segmentsPath = path ; } else { fs . completeLocalOutput ( new Path ( perm , name ) , path ) ; } } // move the segments _ N file if ( segmentsPath != null ) { fs . completeLocalOutput ( new Path ( perm , segmentsPath . getName ( ) ) , segmentsPath ) ; } // move the segments . gen file if ( segmentsGenPath != null ) { fs . completeLocalOutput ( new Path ( perm , segmentsGenPath . getName ( ) ) , segmentsGenPath ) ; } } finally { // finally delete the temp dir ( files should have been deleted ) localFs . delete ( temp ) ; }
public class TransactionImpl { /** * { @ inheritDoc } */ public boolean delistResource ( XAResource xaRes , int flag ) throws IllegalStateException , SystemException { } }
if ( status == Status . STATUS_UNKNOWN ) throw new IllegalStateException ( "Status unknown" ) ; if ( status != Status . STATUS_ACTIVE && status != Status . STATUS_MARKED_ROLLBACK ) throw new IllegalStateException ( "Status not valid" ) ; return true ;
public class NewServiceEntry { /** * See https : / / www . consul . io / api / agent / service . html # checks . * @ param checks The health checks * @ return The { @ link NewServiceEntry } instance */ public NewServiceEntry checks ( List < NewCheck > checks ) { } }
if ( checks != null ) { this . checks . addAll ( checks ) ; } return this ;
public class InternalLogFactory { /** * Returns an SDK logger that logs using the currently configured default * log factory , given the class . */ public static InternalLogApi getLog ( Class < ? > clazz ) { } }
return factoryConfigured ? factory . doGetLog ( clazz ) : new InternalLog ( clazz . getName ( ) ) ; // will look up actual logger per log
public class ApiUtil { public static Database get_db_obj ( final String host , final String port ) throws DevFailed { } }
return apiutilDAO . get_db_obj ( host , port ) ;
public class RandomAccessReader { /** * Gets whether a bit at a specific index is set or not . * @ param index the number of bits at which to test * @ return true if the bit is set , otherwise false * @ throws IOException the buffer does not contain enough bytes to service the request , or index is negative */ public boolean getBit ( int index ) throws IOException { } }
int byteIndex = index / 8 ; int bitIndex = index % 8 ; validateIndex ( byteIndex , 1 ) ; byte b = getByte ( byteIndex ) ; return ( ( b >> bitIndex ) & 1 ) == 1 ;
public class QuarterSkin { /** * * * * * * Private Methods * * * * * */ private double getStartAngle ( ) { } }
ScaleDirection scaleDirection = gauge . getScaleDirection ( ) ; Pos knobPosition = gauge . getKnobPosition ( ) ; switch ( knobPosition ) { case BOTTOM_LEFT : return ScaleDirection . CLOCKWISE == scaleDirection ? 180 : 90 ; case TOP_LEFT : return ScaleDirection . CLOCKWISE == scaleDirection ? 90 : 0 ; case TOP_RIGHT : return ScaleDirection . CLOCKWISE == scaleDirection ? 0 : 270 ; case BOTTOM_RIGHT : default : return ScaleDirection . CLOCKWISE == scaleDirection ? 270 : 180 ; }
public class GrafeasV1Beta1Client { /** * Deletes the specified note . * < p > Sample code : * < pre > < code > * try ( GrafeasV1Beta1Client grafeasV1Beta1Client = GrafeasV1Beta1Client . create ( ) ) { * NoteName name = NoteName . of ( " [ PROJECT ] " , " [ NOTE ] " ) ; * grafeasV1Beta1Client . deleteNote ( name . toString ( ) ) ; * < / code > < / pre > * @ param name The name of the note in the form of ` projects / [ PROVIDER _ ID ] / notes / [ NOTE _ ID ] ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final void deleteNote ( String name ) { } }
DeleteNoteRequest request = DeleteNoteRequest . newBuilder ( ) . setName ( name ) . build ( ) ; deleteNote ( request ) ;
public class MatchingPermission { /** * Returns a string escaped so it will be interpreted literally by the matcher . Specifically it converts all * ' | ' and ' * ' characters to " \ | " and " \ * " respectively . */ public static String escape ( String raw ) { } }
checkNotNull ( raw , "raw" ) ; String escaped = raw ; escaped = escaped . replaceAll ( UNESCAPED_WILDCARD_REGEX , WILDCARD_ESCAPE ) ; escaped = escapeSeparators ( escaped ) ; return escaped ;
public class CollectorServlet { /** * see https : / / github . com / codecentric / spring - boot - admin / pull / 450 */ public static void addCollectorApplication ( String application , String urls ) throws IOException { } }
Parameters . addCollectorApplication ( application , Parameters . parseUrl ( urls ) ) ;
public class LdaptivePersonAttributeDao { /** * Initializes the object after properties are set . */ @ PostConstruct public void initialize ( ) { } }
for ( final SearchScope scope : SearchScope . values ( ) ) { if ( scope . ordinal ( ) == this . searchControls . getSearchScope ( ) ) { this . searchScope = scope ; } }
public class MavenJDOMWriter { /** * Method updateActivationOS . * @ param value * @ param element * @ param counter * @ param xmlTag */ protected void updateActivationOS ( ActivationOS value , String xmlTag , Counter counter , Element element ) { } }
boolean shouldExist = value != null ; Element root = updateElement ( counter , element , xmlTag , shouldExist ) ; if ( shouldExist ) { Counter innerCount = new Counter ( counter . getDepth ( ) + 1 ) ; findAndReplaceSimpleElement ( innerCount , root , "name" , value . getName ( ) , null ) ; findAndReplaceSimpleElement ( innerCount , root , "family" , value . getFamily ( ) , null ) ; findAndReplaceSimpleElement ( innerCount , root , "arch" , value . getArch ( ) , null ) ; findAndReplaceSimpleElement ( innerCount , root , "version" , value . getVersion ( ) , null ) ; }
public class NodeImpl { /** * { @ inheritDoc } */ public String getUUID ( ) throws UnsupportedRepositoryOperationException , RepositoryException { } }
checkValid ( ) ; if ( isNodeType ( Constants . MIX_REFERENCEABLE ) ) { return this . getInternalIdentifier ( ) ; } throw new UnsupportedRepositoryOperationException ( "Node " + getPath ( ) + " is not referenceable" ) ;
public class MappedTextFormatDecorator { /** * Convert get text output from each Textable in map . * Return the format output using Text . format . * Note the bind template is retrieved from * the URL provided in templateUrl . * @ see nyla . solutions . core . data . Textable # getText ( ) */ public String getText ( ) { } }
// convert textable to map of text Object key = null ; try { // read bindTemplate String bindTemplate = getTemplate ( ) ; Map < Object , String > textMap = new Hashtable < Object , String > ( ) ; for ( Map . Entry < String , Textable > entry : map . entrySet ( ) ) { key = entry . getKey ( ) ; try { // convert to text textMap . put ( key , ( entry . getValue ( ) ) . getText ( ) ) ; } catch ( Exception e ) { throw new SystemException ( "Unable to build text for key:" + key + " error:" + e . getMessage ( ) , e ) ; } } Debugger . println ( this , "bindTemplate=" + bindTemplate ) ; String formattedOutput = Text . format ( bindTemplate , textMap ) ; Debugger . println ( this , "formattedOutput=" + formattedOutput ) ; return formattedOutput ; } catch ( RuntimeException e ) { throw e ; } catch ( Exception e ) { throw new SetupException ( e . getMessage ( ) , e ) ; }
public class DSet { /** * Creates a new DSet of the appropriate generic type . */ public static < E extends DSet . Entry > DSet < E > newDSet ( Iterable < ? extends E > source ) { } }
return new DSet < E > ( source ) ;
public class TemplatedProcessor { /** * - - - - - @ DataElement fields and methods */ private List < DataElementInfo > processDataElements ( TypeElement type , TemplateSelector templateSelector , org . jsoup . nodes . Element root ) { } }
List < DataElementInfo > dataElements = new ArrayList < > ( ) ; // fields ElementFilter . fieldsIn ( type . getEnclosedElements ( ) ) . stream ( ) . filter ( field -> MoreElements . isAnnotationPresent ( field , DataElement . class ) ) . forEach ( field -> { // verify the field if ( field . getModifiers ( ) . contains ( Modifier . PRIVATE ) ) { abortWithError ( field , "@%s member must not be private" , DataElement . class . getSimpleName ( ) ) ; } if ( field . getModifiers ( ) . contains ( Modifier . STATIC ) ) { abortWithError ( field , "@%s member must not be static" , DataElement . class . getSimpleName ( ) ) ; } Kind kind = getDataElementInfoKind ( field . asType ( ) ) ; if ( kind == Kind . Custom ) { warning ( field , "Unknown type %s. Consider using one of %s." , field . asType ( ) , EnumSet . complementOf ( EnumSet . of ( Kind . Custom ) ) ) ; } // verify the selector String selector = getSelector ( field ) ; verifySelector ( selector , field , templateSelector , root ) ; // verify the HTMLElement type String typeName = MoreTypes . asTypeElement ( field . asType ( ) ) . getQualifiedName ( ) . toString ( ) ; if ( kind == Kind . HTMLElement ) { verifyHTMLElement ( typeName , selector , field , templateSelector , root ) ; } // create info class for template processing dataElements . add ( new DataElementInfo ( typeName , field . getSimpleName ( ) . toString ( ) , selector , kind , false ) ) ; } ) ; // methods ElementFilter . methodsIn ( type . getEnclosedElements ( ) ) . stream ( ) . filter ( method -> MoreElements . isAnnotationPresent ( method , DataElement . class ) ) . forEach ( method -> { // verify method if ( method . getModifiers ( ) . contains ( Modifier . PRIVATE ) ) { abortWithError ( method , "@%s method must not be private" , DataElement . class . getSimpleName ( ) ) ; } if ( method . getModifiers ( ) . contains ( Modifier . STATIC ) ) { abortWithError ( method , "@%s method must not be static" , DataElement . class . 
getSimpleName ( ) ) ; } Kind kind = getDataElementInfoKind ( method . getReturnType ( ) ) ; if ( kind == Kind . Custom ) { warning ( method , "Unknown return type %s. Consider using one of %s." , method . getReceiverType ( ) , EnumSet . complementOf ( EnumSet . of ( Kind . Custom ) ) ) ; } if ( ! method . getParameters ( ) . isEmpty ( ) ) { abortWithError ( method , "@%s method must not have parameters" , DataElement . class . getSimpleName ( ) ) ; } // verify the selector String selector = getSelector ( method ) ; verifySelector ( selector , method , templateSelector , root ) ; // verify the HTMLElement type String typeName = MoreTypes . asTypeElement ( method . getReturnType ( ) ) . getQualifiedName ( ) . toString ( ) ; if ( kind == Kind . HTMLElement ) { verifyHTMLElement ( typeName , selector , method , templateSelector , root ) ; } // create info class for template processing dataElements . add ( new DataElementInfo ( typeName , method . getSimpleName ( ) . toString ( ) , selector , kind , true ) ) ; } ) ; return dataElements ;
public class ScenarioImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case BpsimPackage . SCENARIO__SCENARIO_PARAMETERS : setScenarioParameters ( ( ScenarioParameters ) null ) ; return ; case BpsimPackage . SCENARIO__ELEMENT_PARAMETERS : getElementParameters ( ) . clear ( ) ; return ; case BpsimPackage . SCENARIO__CALENDAR : getCalendar ( ) . clear ( ) ; return ; case BpsimPackage . SCENARIO__VENDOR_EXTENSION : getVendorExtension ( ) . clear ( ) ; return ; case BpsimPackage . SCENARIO__AUTHOR : setAuthor ( AUTHOR_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__CREATED : setCreated ( CREATED_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__DESCRIPTION : setDescription ( DESCRIPTION_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__ID : setId ( ID_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__INHERITS : setInherits ( INHERITS_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__MODIFIED : setModified ( MODIFIED_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__NAME : setName ( NAME_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__RESULT : setResult ( RESULT_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__VENDOR : setVendor ( VENDOR_EDEFAULT ) ; return ; case BpsimPackage . SCENARIO__VERSION : setVersion ( VERSION_EDEFAULT ) ; return ; } super . eUnset ( featureID ) ;
public class TaskInProgress { /** * The TIP ' s been ordered kill ( ) ed . */ public void kill ( ) { } }
if ( isComplete ( ) || failed ) { return ; } this . failed = true ; killed = true ; this . execFinishTime = JobTracker . getClock ( ) . getTime ( ) ; recomputeProgress ( ) ;
public class SAMLAuthFilter { /** * Handles an authentication request . * @ param request HTTP request * @ param response HTTP response * @ return an authentication object that contains the principal object if successful . * @ throws IOException ex */ @ Override public Authentication attemptAuthentication ( HttpServletRequest request , HttpServletResponse response ) throws IOException { } }
final String requestURI = request . getRequestURI ( ) ; UserAuthentication userAuth = null ; String appid ; if ( requestURI . startsWith ( SAML_ACTION ) ) { appid = Config . getRootAppIdentifier ( ) ; if ( requestURI . startsWith ( SAML_ACTION + "/" ) ) { String id = Utils . urlDecode ( StringUtils . removeStart ( requestURI , SAML_ACTION + "/" ) ) ; if ( ! id . isEmpty ( ) ) { appid = id ; } else { appid = Config . getRootAppIdentifier ( ) ; } } try { App app = Para . getDAO ( ) . read ( App . id ( appid == null ? Config . getRootAppIdentifier ( ) : appid ) ) ; if ( app != null ) { SettingsBuilder builder = new SettingsBuilder ( ) ; Map < String , Object > samlSettings ; String idpMetaUrl = SecurityUtils . getSettingForApp ( app , "security.saml.idp.metadata_url" , "" ) ; if ( StringUtils . isBlank ( idpMetaUrl ) ) { samlSettings = getSAMLSettings ( app ) ; } else { samlSettings = parseRemoteXML ( new URL ( idpMetaUrl ) , request . getParameter ( "entityid" ) ) ; samlSettings . putAll ( getSAMLSettings ( app ) ) ; // override IDP meta with config values } Saml2Settings settings = builder . fromValues ( samlSettings ) . build ( ) ; Auth auth = new Auth ( settings , request , response ) ; if ( request . getParameter ( "SAMLResponse" ) != null ) { auth . processResponse ( ) ; if ( auth . isAuthenticated ( ) ) { List < String > errors = auth . getErrors ( ) ; if ( errors . isEmpty ( ) ) { userAuth = getOrCreateUser ( app , auth . getAttributes ( ) ) ; } else { throw new AuthenticationServiceException ( StringUtils . join ( errors , "; " ) ) ; } } } else { auth . login ( settings . getSpAssertionConsumerServiceUrl ( ) . toString ( ) ) ; return null ; // redirect to IDP } } } catch ( SettingsException ex ) { LOG . error ( "Failed to authenticate app '{}' with SAML: {}" , appid , ex . getMessage ( ) ) ; } catch ( Exception ex ) { LOG . error ( null , ex ) ; } } return SecurityUtils . checkIfActive ( userAuth , SecurityUtils . getAuthenticatedUser ( userAuth ) , true ) ;
public class MalmoEnvServer {
    /**
     * Handler for &lt;Step_&gt; messages. Single digit option code after _ specifies
     * if turnkey and info are included in message.
     */
    private void step(String command, Socket socket, DataInputStream din) throws IOException {
        // Strip the surrounding step tags; the option digit sits just before the payload.
        String actions = command.substring(stepTagLength, command.length() - (stepTagLength + 2));
        int options = Character.getNumericValue(command.charAt(stepTagLength - 2));
        boolean withTurnkey = options < 2;
        boolean withInfo = options == 0 || options == 2;
        // TCPUtils.Log(Level.FINE, "Command (step action): " + actionCommand + " options " + options);
        byte[] stepTurnKey;
        if (withTurnkey) {
            // Turn key is sent as a length-prefixed byte block after the command.
            int hdr;
            hdr = din.readInt();
            stepTurnKey = new byte[hdr];
            din.readFully(stepTurnKey);
        } else {
            stepTurnKey = new byte[0];
        }
        DataOutputStream dout = new DataOutputStream(socket.getOutputStream());
        double reward = 0.0;
        boolean done;
        byte[] obs;
        String info = "";
        byte[] currentTurnKey;
        byte[] nextTurnKey;
        boolean sent = false;
        lock.lock();
        try {
            done = envState.done;
            obs = getObservation(done);
            // If done or we have new observation and it's our turn then submit command and pick up rewards.
            currentTurnKey = envState.turnKey.getBytes();
            boolean outOfTurn = true;
            nextTurnKey = currentTurnKey;
            // NOTE(review): `actions != ""` compares String references, not contents;
            // since substring() always returns a fresh String this is always true.
            // The inner !actions.isEmpty() guard compensates, but the intent was
            // probably !actions.isEmpty() here too — confirm before changing.
            if (!done && obs.length > 0 && actions != "") {
                // CurrentKey StepKey Action(WithKey) nextTurnKey outOfTurn
                // ""         ""      Y               Current     N
                // ""         X       N               Step        Y
                // X          0       N               Current     Y
                // X          X       Y(WK)           Current     N
                // X          Y       N               Current     Y
                // TCPUtils.Log(Level.FINE, "current TK " + envState.turnKey + " step TK " + new String(stepTurnKey));
                if (currentTurnKey.length == 0) {
                    if (stepTurnKey.length == 0) {
                        // No turn keys on either side: enqueue each newline-separated command.
                        if (actions.contains("\n")) {
                            String[] cmds = actions.split("\\n");
                            for (String cmd : cmds) {
                                envState.commands.add(cmd);
                            }
                        } else {
                            if (!actions.isEmpty())
                                envState.commands.add(actions);
                        }
                        outOfTurn = false;
                        sent = true;
                    } else {
                        nextTurnKey = stepTurnKey;
                    }
                } else {
                    if (stepTurnKey.length != 0) {
                        if (Arrays.equals(currentTurnKey, stepTurnKey)) {
                            // The step turn key may later still be stale when picked up from the command queue.
                            envState.commands.add(new String(stepTurnKey) + " " + actions);
                            outOfTurn = false;
                            envState.turnKey = "";
                            envState.lastTurnKey = new String(stepTurnKey);
                            sent = true;
                        }
                    }
                }
            }
            if (done || (obs.length > 0 && !outOfTurn)) {
                // Pick up rewards.
                reward = envState.reward;
                envState.reward = 0.0;
                if (withInfo) {
                    info = envState.info;
                    envState.info = "";
                    if (info.isEmpty() && !done) {
                        // Briefly wait for info to arrive; swallowing the interrupt here
                        // simply ends the wait early.
                        try {
                            cond.await(COND_WAIT_SECONDS, TimeUnit.SECONDS);
                        } catch (InterruptedException ie) {
                        }
                        info = envState.info;
                        envState.info = "";
                        if (envState.obs != null && envState.obs != obs) {
                            // Later observation.
                            obs = envState.obs;
                        }
                    }
                }
                envState.obs = null;
            }
        } finally {
            lock.unlock();
        }
        // Reply layout: obs block, (reward, done, sent) block, optional info, optional turn key.
        dout.writeInt(obs.length);
        dout.write(obs);
        dout.writeInt(BYTES_DOUBLE + 2);
        dout.writeDouble(reward);
        dout.writeByte(done ? 1 : 0);
        dout.writeByte(sent ? 1 : 0);
        if (withInfo) {
            byte[] infoBytes = info.getBytes(utf8);
            dout.writeInt(infoBytes.length);
            dout.write(infoBytes);
        }
        if (withTurnkey) {
            dout.writeInt(nextTurnKey.length);
            dout.write(nextTurnKey);
        }
        dout.flush();
    }
}
public class CPDefinitionOptionValueRelPersistenceImpl { /** * Caches the cp definition option value rels in the entity cache if it is enabled . * @ param cpDefinitionOptionValueRels the cp definition option value rels */ @ Override public void cacheResult ( List < CPDefinitionOptionValueRel > cpDefinitionOptionValueRels ) { } }
for ( CPDefinitionOptionValueRel cpDefinitionOptionValueRel : cpDefinitionOptionValueRels ) { if ( entityCache . getResult ( CPDefinitionOptionValueRelModelImpl . ENTITY_CACHE_ENABLED , CPDefinitionOptionValueRelImpl . class , cpDefinitionOptionValueRel . getPrimaryKey ( ) ) == null ) { cacheResult ( cpDefinitionOptionValueRel ) ; } else { cpDefinitionOptionValueRel . resetOriginalValues ( ) ; } }
public class JBossASClient { /** * Convienence method that builds a partial operation request node . * @ param operation the operation to be requested * @ param address identifies the target resource * @ return the partial operation request node - caller should fill this in further to complete the node */ public static ModelNode createRequest ( String operation , Address address ) { } }
return createRequest ( operation , address , null ) ;
public class NodeTemplateClient { /** * Gets the access control policy for a resource . May be empty if no such policy or resource * exists . * < p > Sample code : * < pre > < code > * try ( NodeTemplateClient nodeTemplateClient = NodeTemplateClient . create ( ) ) { * ProjectRegionNodeTemplateResourceName resource = ProjectRegionNodeTemplateResourceName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ RESOURCE ] " ) ; * Policy response = nodeTemplateClient . getIamPolicyNodeTemplate ( resource . toString ( ) ) ; * < / code > < / pre > * @ param resource Name or id of the resource for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Policy getIamPolicyNodeTemplate ( String resource ) { } }
GetIamPolicyNodeTemplateHttpRequest request = GetIamPolicyNodeTemplateHttpRequest . newBuilder ( ) . setResource ( resource ) . build ( ) ; return getIamPolicyNodeTemplate ( request ) ;
public class Histogram { /** * Generate the histogram of k bins . * @ param data the data points . * @ param k the number of bins . * @ return a 3 - by - k bins array of which first row is the lower bound of bins , * second row is the upper bound of bins , and the third row is the frequence * count . */ public static double [ ] [ ] histogram ( int [ ] data , int k ) { } }
if ( k <= 1 ) { throw new IllegalArgumentException ( "Invalid number of bins: " + k ) ; } int min = Math . min ( data ) ; int max = Math . max ( data ) ; int span = max - min + 1 ; int width = 1 ; int residual = 1 ; while ( residual > 0 ) { width = span / k ; if ( width == 0 ) { width = 1 ; } residual = span - k * width ; if ( residual > 0 ) { k += 1 ; } } double center = width / 2.0 ; double [ ] breaks = new double [ k + 1 ] ; breaks [ 0 ] = min - center ; for ( int i = 1 ; i <= k ; i ++ ) { breaks [ i ] = breaks [ i - 1 ] + width ; } return histogram ( data , breaks ) ;
public class FileUtil { /** * 简单写入String到File . */ public static void write ( final CharSequence data , final File file ) throws IOException { } }
Validate . notNull ( file ) ; Validate . notNull ( data ) ; try ( BufferedWriter writer = Files . newBufferedWriter ( file . toPath ( ) , Charsets . UTF_8 ) ) { writer . append ( data ) ; }
public class VdmThreadEventHandler { private void fireDeltaUpdatingThread ( IThread thread , int flags ) { } }
ModelDelta delta = buildRootDelta ( ) ; ModelDelta node = addPathToThread ( delta , thread ) ; node = node . addNode ( thread , flags ) ; fireDelta ( delta ) ;
public class CommerceAccountPersistenceImpl { /** * Returns the last commerce account in the ordered set where companyId = & # 63 ; . * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce account * @ throws NoSuchAccountException if a matching commerce account could not be found */ @ Override public CommerceAccount findByCompanyId_Last ( long companyId , OrderByComparator < CommerceAccount > orderByComparator ) throws NoSuchAccountException { } }
CommerceAccount commerceAccount = fetchByCompanyId_Last ( companyId , orderByComparator ) ; if ( commerceAccount != null ) { return commerceAccount ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "companyId=" ) ; msg . append ( companyId ) ; msg . append ( "}" ) ; throw new NoSuchAccountException ( msg . toString ( ) ) ;
public class GenericCompiler { /** * Helper method for the constructor . */ private java . lang . reflect . Method findConstr ( String name ) { } }
Selection slkt ; int i ; slkt = Method . forName ( name ) ; slkt = slkt . restrictArgumentCount ( fields . length ) ; for ( i = 0 ; i < fields . length ; i ++ ) { slkt . restrictArgumentType ( i , fields [ i ] . getReturnType ( ) ) ; } switch ( slkt . size ( ) ) { case 0 : throw new RuntimeException ( "no such constructor: " + name ) ; case 1 : return ( ( Method ) slkt . getFunction ( ) ) . getRaw ( ) ; default : throw new RuntimeException ( "constructor ambiguous: " + name ) ; }
public class FileSystemClassInformationRepository { /** * Constructs a { @ link FileSystemClassInformationRepository } object and loads it up with the classes in a classpath . * @ param initialClasspath classpath to scan for class information ( can be JAR files and / or folders ) * @ return newly created { @ link FileSystemClassInformationRepository } object * @ throws NullPointerException if any argument is { @ code null } or contains { @ code null } elements * @ throws IOException if an IO error occurs */ public static FileSystemClassInformationRepository create ( List < File > initialClasspath ) throws IOException { } }
Validate . notNull ( initialClasspath ) ; Validate . noNullElements ( initialClasspath ) ; FileSystemClassInformationRepository repo = new FileSystemClassInformationRepository ( ) ; repo . addClasspath ( initialClasspath ) ; return repo ;
public class TransformerImpl { /** * Get a SAX2 ContentHandler for the input . * @ param doDocFrag true if a DocumentFragment should be created as * the root , rather than a Document . * @ return A valid ContentHandler , which should never be null , as * long as getFeature ( " http : / / xml . org / trax / features / sax / input " ) * returns true . */ public ContentHandler getInputContentHandler ( boolean doDocFrag ) { } }
if ( null == m_inputContentHandler ) { // if ( null = = m _ urlOfSource & & null ! = m _ stylesheetRoot ) // m _ urlOfSource = m _ stylesheetRoot . getBaseIdentifier ( ) ; m_inputContentHandler = new TransformerHandlerImpl ( this , doDocFrag , m_urlOfSource ) ; } return m_inputContentHandler ;
public class MessageProcessor { /** * Indicates that the WAS server is now open for E - business . * This event is flowed to all instances of mediation point , as it opens the * " gate " wherebye mediation work can now begin in anger . * Part of the JsEngineComponent interface . */ @ Override public void serverStarted ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "serverStarted" ) ; synchronized ( _mpStartStopLock ) { if ( ! _started || _isWASOpenForEBusiness ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "serverStarted" , "Returning as ME not started " + _started + " or already announced " + _isWASOpenForEBusiness ) ; return ; } _isWASOpenForEBusiness = true ; } _destinationManager . announceWASOpenForEBusiness ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "serverStarted" ) ;
public class DbHelperBase { /** * Moved to DbHelper * @ deprecated */ @ Deprecated private String idSuffix ( String name , DomainObject to ) { } }
if ( useIdSuffixInForeignKey ( ) ) { Attribute idAttribute = getIdAttribute ( to ) ; if ( idAttribute != null ) { String idName = idAttribute . getDatabaseColumn ( ) . toUpperCase ( ) ; String convertedName = convertDatabaseName ( name ) ; if ( idName . equals ( convertedName ) && idName . startsWith ( to . getDatabaseTable ( ) ) ) { idName = idName . substring ( to . getDatabaseTable ( ) . length ( ) ) ; } else if ( idName . startsWith ( convertedName ) ) { idName = idName . substring ( convertedName . length ( ) ) ; } if ( idName . startsWith ( "_" ) ) { return idName ; } else { return ( "_" + idName ) ; } } } return "" ;
public class SheetColumn { /** * The column width * @ return */ public Integer getColWidth ( ) { } }
final Object result = getStateHelper ( ) . eval ( PropertyKeys . colWidth , null ) ; if ( result == null ) { return null ; } return Integer . valueOf ( result . toString ( ) ) ;
public class OptionsBuilder { /** * Seths the dynamic arguments * @ param dynamicArgs * Map of dynamic arguments * @ return Options # DYNAMIC _ ARGS */ public OptionsBuilder dynamicArgs ( Map < String , ? > dynamicArgs ) { } }
final Map < String , Object > m = dynamicArgs != null ? Collections . unmodifiableMap ( new HashMap < String , Object > ( dynamicArgs ) ) : Collections . < String , Object > emptyMap ( ) ; options . put ( Options . DYNAMIC_ARGS , m ) ; return this ;
public class MessageInitialProcess {
    /**
     * RegisterInitialProcesses Method.
     * Registers the generic send queue, then walks every MessageProcessInfo record
     * and registers a process for each incoming-request message type that names a queue.
     */
    public void registerInitialProcesses() {
        MessageProcessInfo recMessageProcessInfo = new MessageProcessInfo(this);
        try {
            // Always register this generic processing queue.
            this.registerProcessForMessage(new BaseMessageFilter(MessageConstants.TRX_SEND_QUEUE, MessageConstants.INTERNET_QUEUE, null, null), null, null);
            // NOTE(review): close() before the hasNext() loop appears to reposition the
            // record cursor before iteration in this framework — confirm against
            // MessageProcessInfo's cursor semantics.
            recMessageProcessInfo.close();
            while (recMessageProcessInfo.hasNext()) {
                recMessageProcessInfo.next();
                String strQueueName = recMessageProcessInfo.getQueueName(true);
                String strQueueType = recMessageProcessInfo.getQueueType(true);
                String strProcessClass = recMessageProcessInfo.getField(MessageProcessInfo.PROCESSOR_CLASS).toString();
                Map<String, Object> properties = ((PropertiesField) recMessageProcessInfo.getField(MessageProcessInfo.PROPERTIES)).getProperties();
                Record recMessageType = ((ReferenceField) recMessageProcessInfo.getField(MessageProcessInfo.MESSAGE_TYPE_ID)).getReference();
                if (recMessageType != null) {
                    // Start all processes that handle INcoming REQUESTs.
                    String strMessageType = recMessageType.getField(MessageType.CODE).toString();
                    Record recMessageInfo = ((ReferenceField) recMessageProcessInfo.getField(MessageProcessInfo.MESSAGE_INFO_ID)).getReference();
                    if (recMessageInfo != null) {
                        Record recMessageInfoType = ((ReferenceField) recMessageInfo.getField(MessageInfo.MESSAGE_INFO_TYPE_ID)).getReference();
                        if (recMessageInfoType != null) {
                            String strMessageInfoType = recMessageInfoType.getField(MessageInfoType.CODE).toString();
                            // Only register when the record describes an incoming request with a named queue.
                            if (MessageInfoType.REQUEST.equals(strMessageInfoType))
                                if (MessageType.MESSAGE_IN.equals(strMessageType))
                                    if ((strQueueName != null) && (strQueueName.length() > 0))
                                        this.registerProcessForMessage(new BaseMessageFilter(strQueueName, strQueueType, null, null), strProcessClass, properties);
                        }
                    }
                }
            }
            recMessageProcessInfo.close();
        } catch (DBException ex) {
            ex.printStackTrace();
        } finally {
            // Always release the record's resources.
            recMessageProcessInfo.free();
        }
    }
}
public class DestinationManager {
    /**
     * <p>This method is used to create a link that is localised on this ME.</p>
     */
    private void createLinkLocalization(VirtualLinkDefinition virtualLinkDefinition, LocalTransaction transaction) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createLinkLocalization", new Object[] { virtualLinkDefinition, transaction });

        boolean linkCreated = false;
        LinkHandler linkHandler = null;

        // Include links that are awaiting deletion as we may need to re-use their
        // stream state.
        LinkTypeFilter filter = new LinkTypeFilter();
        DestinationHandler dh = linkIndex.findByName(virtualLinkDefinition.getName(), filter);
        if (dh == null) {
            // Create a new LinkHandler, which is created locked
            linkHandler = new LinkHandler(virtualLinkDefinition, messageProcessor, this, transaction, durableSubscriptions);
            linkCreated = true;
            // The link is localised here, set the isLocal flag.
            // TODO could we use a different flag to avoid confusion?
            linkHandler.setLocal();
            LinkIndex.Type type = new LinkIndex.Type();
            type.local = Boolean.TRUE;
            type.mqLink = Boolean.FALSE;
            type.remote = Boolean.FALSE;
            type.state = State.CREATE_IN_PROGRESS;
            linkIndex.put(linkHandler, type);
            linkHandler.registerControlAdapters();
        } else {
            linkHandler = (LinkHandler) dh;
        }

        // If the link is corrupt, do not attempt any update.
        if (!linkHandler.isCorruptOrIndoubt()) {
            synchronized (linkHandler) {
                // If we didnt just create the linkHandler and the one that already
                // existed is for a different UUID, or there is already a localisation for
                // the linkHandler on this ME, then this could be a problem and further
                // tests are required.
                if (!linkCreated) {
                    // Use the existing linkHandler of the same name as the stream state associated with it needs
                    // to be associated with the new LinkHandler. This is because the ME at the other end
                    // of the link in the other bus only knows the LinkHandler by name and cannot
                    // tell that it has been deleted and recreated, so could still be sending using the old
                    // stream state
                    synchronized (linkIndex) {
                        LinkIndex.Type type = null;
                        if (linkIndex.get(linkHandler.getUuid()) != null) {
                            type = (LinkIndex.Type) linkIndex.getType(linkHandler);
                            linkIndex.remove(linkHandler);
                        } else
                            type = new LinkIndex.Type();
                        linkHandler.updateUuid(virtualLinkDefinition.getUuid());
                        type.local = Boolean.TRUE;
                        type.remote = Boolean.FALSE;
                        type.mqLink = Boolean.FALSE;
                        type.state = State.ACTIVE;
                        // Alert the lookups object to handle the re-definition
                        linkIndex.put(linkHandler, type);
                        // Unset the "to-be-deleted" indicator
                        if (linkHandler.isToBeDeleted() || !linkHandler.getUuid().equals(virtualLinkDefinition.getUuid())) {
                            linkHandler.setToBeDeleted(false);
                            linkIndex.cleanup(linkHandler);
                        }
                    }
                    linkHandler.updateLinkDefinition(virtualLinkDefinition, transaction);
                }

                // Get the uuid of the ME in the other bus from TRM, then
                // create a transmit q to it, to store messages originated from
                // this ME destined for the other bus
                LinkSelection s = null;
                LinkManager linkManager = messageProcessor.getLinkManager();
                try {
                    s = linkManager.select(linkHandler.getUuid());
                } catch (LinkException e) {
                    // Error during create of the link. Trace an FFST.
                    FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.DestinationManager.createLinkLocalization", "1:6182:1.508.1.7", this);
                    SibTr.exception(tc, e);
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "createLinkLocalization", e);
                    throw new SIResourceException(e);
                }

                SIBUuid8 localisingME = null;
                SIBUuid8 routingME = null;
                if (s != null) {
                    localisingME = s.getInboundMeUuid();
                    routingME = s.getOutboundMeUuid();
                } else {
                    // No selection available: fall back to the well-known "unknown" uuid.
                    localisingME = new SIBUuid8(SIMPConstants.UNKNOWN_UUID);
                    routingME = null;
                }

                if (linkCreated || !linkHandler.hasRemote()) // PK76306 - if we don't have a remote localisation then create one
                {
                    linkHandler.addNewPtoPLocalization(true, transaction, localisingME, null, true);
                } else
                    linkHandler.updateLocalisationSet(localisingME, routingME);
            }
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createLinkLocalization");
    }
}
public class BaseMenuScreen { /** * Get the command string that will restore this screen . * @ return The URL for the current screen . */ public String getScreenURL ( ) { } }
String strURL = super . getScreenURL ( ) ; if ( this . getClass ( ) . getName ( ) . equals ( BaseMenuScreen . class . getName ( ) ) ) { strURL = this . addURLParam ( strURL , DBParams . RECORD , this . getMainRecord ( ) . getClass ( ) . getName ( ) ) ; strURL = this . addURLParam ( strURL , DBParams . COMMAND , MenuConstants . MENUREC ) ; } else strURL = this . addURLParam ( strURL , DBParams . SCREEN , this . getClass ( ) . getName ( ) ) ; try { if ( this . getMainRecord ( ) != null ) if ( ( this . getMainRecord ( ) . getEditMode ( ) == Constants . EDIT_IN_PROGRESS ) || ( this . getMainRecord ( ) . getEditMode ( ) == Constants . EDIT_CURRENT ) ) { String strBookmark = this . getMainRecord ( ) . getHandle ( DBConstants . OBJECT_ID_HANDLE ) . toString ( ) ; strURL = this . addURLParam ( strURL , DBConstants . STRING_OBJECT_ID_HANDLE , strBookmark ) ; } } catch ( DBException ex ) { Debug . print ( ex ) ; ex . printStackTrace ( ) ; } return strURL ;
public class Wordcount { @ Override public int run ( String [ ] args ) throws Exception { } }
Configuration conf = new Configuration ( ) ; String [ ] otherArgs = new GenericOptionsParser ( conf , args ) . getRemainingArgs ( ) ; if ( otherArgs . length != 2 ) { System . err . println ( "Usage: wordcount <in> <out>" ) ; return 2 ; } conf . set ( "nl.basjes.parse.apachehttpdlogline.format" , logFormat ) ; // A ' , ' separated list of fields conf . set ( "nl.basjes.parse.apachehttpdlogline.fields" , "STRING:request.status.last" ) ; Job job = Job . getInstance ( conf , "word count" ) ; job . setJarByClass ( Wordcount . class ) ; FileInputFormat . addInputPath ( job , new Path ( otherArgs [ 0 ] ) ) ; job . setInputFormatClass ( ApacheHttpdLogfileInputFormat . class ) ; job . setMapperClass ( TokenizerMapper . class ) ; job . setCombinerClass ( LongSumReducer . class ) ; job . setReducerClass ( LongSumReducer . class ) ; // configuration should contain reference to your namenode FileSystem fs = FileSystem . get ( conf ) ; // true stands for recursively deleting the folder you gave Path outputPath = new Path ( otherArgs [ 1 ] ) ; fs . delete ( outputPath , true ) ; FileOutputFormat . setOutputPath ( job , outputPath ) ; job . setOutputKeyClass ( Text . class ) ; job . setOutputValueClass ( LongWritable . class ) ; if ( job . waitForCompletion ( true ) ) { return 0 ; } return 1 ;
public class Choice7 { /** * Static factory method for wrapping a value of type < code > B < / code > in a { @ link Choice7 } . * @ param b the value * @ param < A > the first possible type * @ param < B > the second possible type * @ param < C > the third possible type * @ param < D > the fourth possible type * @ param < E > the fifth possible type * @ param < F > the sixth possible type * @ param < G > the seventh possible type * @ return the wrapped value as a { @ link Choice7 } & lt ; A , B , C , D , E , F , G & gt ; */ public static < A , B , C , D , E , F , G > Choice7 < A , B , C , D , E , F , G > b ( B b ) { } }
return new _B < > ( b ) ;
public class ExportDecoderBase { /** * This is for legacy connector . */ public boolean writeRow ( Object row [ ] , CSVWriter writer , boolean skipinternal , BinaryEncoding binaryEncoding , SimpleDateFormat dateFormatter ) { } }
int firstfield = getFirstField ( skipinternal ) ; try { String [ ] fields = new String [ m_tableSchema . size ( ) - firstfield ] ; for ( int i = firstfield ; i < m_tableSchema . size ( ) ; i ++ ) { if ( row [ i ] == null ) { fields [ i - firstfield ] = "NULL" ; } else if ( m_tableSchema . get ( i ) == VoltType . VARBINARY && binaryEncoding != null ) { if ( binaryEncoding == BinaryEncoding . HEX ) { fields [ i - firstfield ] = Encoder . hexEncode ( ( byte [ ] ) row [ i ] ) ; } else { fields [ i - firstfield ] = Encoder . base64Encode ( ( byte [ ] ) row [ i ] ) ; } } else if ( m_tableSchema . get ( i ) == VoltType . STRING ) { fields [ i - firstfield ] = ( String ) row [ i ] ; } else if ( m_tableSchema . get ( i ) == VoltType . TIMESTAMP && dateFormatter != null ) { TimestampType timestamp = ( TimestampType ) row [ i ] ; fields [ i - firstfield ] = dateFormatter . format ( timestamp . asApproximateJavaDate ( ) ) ; } else { fields [ i - firstfield ] = row [ i ] . toString ( ) ; } } writer . writeNext ( fields ) ; } catch ( Exception x ) { x . printStackTrace ( ) ; return false ; } return true ;
public class ItemImpl { /** * { @ inheritDoc } */ public void remove ( ) throws RepositoryException , ConstraintViolationException , VersionException , LockException { } }
checkValid ( ) ; if ( isRoot ( ) ) { throw new RepositoryException ( "Can't remove ROOT node." ) ; } // Check constraints ItemDefinition def ; if ( isNode ( ) ) { def = ( ( NodeImpl ) this ) . getDefinition ( ) ; } else { def = ( ( PropertyImpl ) this ) . getDefinition ( ) ; } if ( def . isMandatory ( ) || def . isProtected ( ) ) { throw new ConstraintViolationException ( "Can't remove mandatory or protected item " + getPath ( ) ) ; } NodeImpl parentNode = parent ( ) ; // Check if versionable ancestor is not checked - in if ( ! parentNode . checkedOut ( ) ) { throw new VersionException ( "Node " + parent ( ) . getPath ( ) + " or its nearest ancestor is checked-in" ) ; } // Check locking if ( ! parentNode . checkLocking ( ) ) { throw new LockException ( "Node " + parent ( ) . getPath ( ) + " is locked " ) ; } // launch event session . getActionHandler ( ) . preRemoveItem ( this ) ; removeVersionable ( ) ; // remove from datamanager dataManager . delete ( data ) ;
public class MongoDBQuery {
    /**
     * Handle special functions (e.g. INCREMENT/DECREMENT) in the update clauses,
     * translating them into a Mongo update document and executing it.
     *
     * @return the number of affected records, or -1 when no special function is present
     */
    private int handleSpecialFunctions() {
        // First pass: detect whether any update clause uses a special function keyword.
        boolean needsSpecialAttention = false;
        outer: for (UpdateClause c : kunderaQuery.getUpdateClauseQueue()) {
            for (int i = 0; i < FUNCTION_KEYWORDS.length; i++) {
                if (c.getValue() instanceof String) {
                    String func = c.getValue().toString();
                    // Normalise by stripping spaces before matching the keyword pattern.
                    func = func.replaceAll(" ", "");
                    if (func.toUpperCase().matches(FUNCTION_KEYWORDS[i])) {
                        needsSpecialAttention = true;
                        c.setValue(func);
                        break outer;
                    }
                }
            }
        }
        if (!needsSpecialAttention)
            return -1;

        EntityMetadata m = getEntityMetadata();
        Metamodel metaModel = kunderaMetadata.getApplicationMetadata().getMetamodel(m.getPersistenceUnit());
        Queue filterClauseQueue = kunderaQuery.getFilterClauseQueue();
        BasicDBObject query = createMongoQuery(m, filterClauseQueue);
        BasicDBObject update = new BasicDBObject();

        // Second pass: build the update document, mapping INCREMENT/DECREMENT onto $inc.
        for (UpdateClause c : kunderaQuery.getUpdateClauseQueue()) {
            String columName = getColumnName(m, metaModel.entity(m.getEntityClazz()), c.getProperty());
            boolean isSpecialFunction = false;
            for (int i = 0; i < FUNCTION_KEYWORDS.length; i++) {
                if (c.getValue() instanceof String && c.getValue().toString().toUpperCase().matches(FUNCTION_KEYWORDS[i])) {
                    isSpecialFunction = true;
                    // "INCREMENT(" / "DECREMENT(" are both 10 characters, hence substring(10, ...).
                    if (c.getValue().toString().toUpperCase().startsWith("INCREMENT(")) {
                        String val = c.getValue().toString().toUpperCase();
                        val = val.substring(10, val.indexOf(")"));
                        update.put("$inc", new BasicDBObject(columName, Integer.valueOf(val)));
                    } else if (c.getValue().toString().toUpperCase().startsWith("DECREMENT(")) {
                        String val = c.getValue().toString().toUpperCase();
                        val = val.substring(10, val.indexOf(")"));
                        // NOTE(review): a second $inc put replaces any earlier one on this
                        // update document — multiple increment clauses in one query would
                        // not accumulate; confirm whether that case can occur.
                        update.put("$inc", new BasicDBObject(columName, -Integer.valueOf(val)));
                    }
                }
            }
            if (!isSpecialFunction) {
                // Plain assignment for non-function clauses.
                update.put(columName, c.getValue());
            }
        }
        Client client = persistenceDelegeator.getClient(m);
        return ((MongoDBClient) client).handleUpdateFunctions(query, update, m.getTableName());
    }
}
public class DefaultDataGridResourceProvider { /** * Lookup a message given a message key . If chaining is enabled via * { @ link DataGridResourceProvider # setEnableChaining ( boolean ) } , the outer resource bundle will be searched , * first . If no message matching the message key is found , the default resource bundle is searched . * @ param key the key * @ return the message . */ private final String internalGetMessage ( String key ) { } }
if ( getResourceBundlePath ( ) == null ) return getDefaultMessage ( key ) ; else { /* ensure that the correct resource bundles are created */ if ( _resourceBundle == null ) { _resourceBundle = createResourceBundle ( getResourceBundlePath ( ) ) ; } if ( isEnableChaining ( ) && _resourceBundleKeys == null ) { Enumeration e = _resourceBundle . getKeys ( ) ; while ( e . hasMoreElements ( ) ) _resourceBundleKeys . put ( e . nextElement ( ) , VALUE_PLACEHOLDER ) ; } if ( ! isEnableChaining ( ) || _resourceBundleKeys . containsKey ( key ) ) return _resourceBundle . getString ( key ) ; else return _defaultResourceBundle . getString ( key ) ; }
public class Server { /** * Get MBean data * @ param name The bean name * @ return The data * @ exception JMException Thrown if an error occurs */ public static MBeanData getMBeanData ( String name ) throws JMException { } }
MBeanServer server = getMBeanServer ( ) ; ObjectName objName = new ObjectName ( name ) ; MBeanInfo info = server . getMBeanInfo ( objName ) ; return new MBeanData ( objName , info ) ;
public class Http100ContWriteCallback { /** * @ see * com . ibm . wsspi . tcpchannel . TCPWriteCompletedCallback # error ( com . ibm . wsspi . * channelfw . VirtualConnection , * com . ibm . wsspi . tcpchannel . TCPWriteRequestContext , java . io . IOException ) */ @ SuppressWarnings ( "unused" ) public void error ( VirtualConnection vc , TCPWriteRequestContext wsc , IOException ioe ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "error() called for vc=" + vc + " ioe=" + ioe ) ; } HttpInboundServiceContextImpl isc = ( HttpInboundServiceContextImpl ) vc . getStateMap ( ) . get ( CallbackIDs . CALLBACK_HTTPISC ) ; // log the failed response write if ( isc . getHttpConfig ( ) . getDebugLog ( ) . isEnabled ( DebugLog . Level . WARN ) ) { isc . getHttpConfig ( ) . getDebugLog ( ) . log ( DebugLog . Level . WARN , HttpMessages . MSG_WRITE_FAIL , isc ) ; } // close the connection now . No app channel has been involved at this // point yet . isc . getLink ( ) . close ( vc , ioe ) ;
public class TaskRunner { /** * Starts the next task if necessary . */ private void checkTasks ( ) { } }
if ( currentTask == null ) { Handler < Task > task = queue . poll ( ) ; if ( task != null ) { currentTask = new Task ( this ) ; task . handle ( currentTask ) ; } }
public class JBBPDslBuilder { /** * Create named var array with fixed size . * @ param name name of the array , can be null for anonymous one * @ param sizeExpression expression to calculate size of the array , must not be null . * @ param param optional parameter for the field , can be null * @ return the builder instance , must not be null */ public JBBPDslBuilder VarArray ( final String name , final String sizeExpression , final String param ) { } }
return this . CustomArray ( "var" , name , sizeExpression , param ) ;
public class ConsoleLogHandler { /** * Set the writers for SystemOut and SystemErr respectfully * @ param sysLogHolder SystemLogHolder object for SystemOut * @ param sysErrHolder SystemLogHolder object for SystemErr */ public void setWriter ( Object sysLogHolder , Object sysErrHolder ) { } }
this . sysOutHolder = ( SystemLogHolder ) sysLogHolder ; this . sysErrHolder = ( SystemLogHolder ) sysErrHolder ;
public class Tile {
    /**
     * Extend of this tile in absolute coordinates.
     *
     * @return rectangle with the absolute coordinates.
     */
    public Rectangle getBoundaryAbsolute() {
        // Builds the square covering [origin, origin + tileSize) on both axes.
        // NOTE(review): the third and fourth arguments are the far corner
        // (origin + tileSize), i.e. this assumes a Rectangle(x1, y1, x2, y2)
        // corner-based constructor rather than (x, y, width, height) — confirm
        // against the project's Rectangle class.
        return new Rectangle(getOrigin().x, getOrigin().y, getOrigin().x + tileSize, getOrigin().y + tileSize);
    }
}
public class AutoScaleTask { /** * Scale tasks exceptions are absorbed . * @ param request incoming request from request stream . * @ param segments segments to seal * @ param newRanges new ranges for segments to create * @ return CompletableFuture */ private CompletableFuture < Void > postScaleRequest ( final AutoScaleEvent request , final List < Long > segments , final List < Map . Entry < Double , Double > > newRanges , final long requestId ) { } }
ScaleOpEvent event = new ScaleOpEvent ( request . getScope ( ) , request . getStream ( ) , segments , newRanges , false , System . currentTimeMillis ( ) , requestId ) ; return streamMetadataTasks . writeEvent ( event ) ;
public class MkAppTree {
    /**
     * Computes the polynomial approximation of the specified knn-distances.
     *
     * @param knnDistances the knn-distances of the leaf entry
     * @return the polynomial approximation of the specified knn-distances
     */
    private PolynomialApproximation approximateKnnDistances(double[] knnDistances) {
        StringBuilder msg = new StringBuilder();
        // Count the leading zero distances (necessary when log-log space is
        // used, since log(0) is undefined).
        int k_0 = 0;
        if (settings.log) {
            for (int i = 0; i < settings.kmax; i++) {
                double dist = knnDistances[i];
                if (dist == 0) {
                    k_0++;
                } else {
                    break;
                }
            }
        }
        // Build the sample points, skipping the k_0 leading zeros.
        double[] x = new double[settings.kmax - k_0];
        double[] y = new double[settings.kmax - k_0];
        for (int k = 0; k < settings.kmax - k_0; k++) {
            if (settings.log) {
                // NOTE(review): when k == 0 and k_0 == 0 this evaluates
                // log(0) == -infinity; presumably the data guarantees
                // k_0 > 0 in log mode -- TODO confirm.
                x[k] = FastMath.log(k + k_0);
                y[k] = FastMath.log(knnDistances[k + k_0]);
            } else {
                x[k] = k + k_0;
                y[k] = knnDistances[k + k_0];
            }
        }
        // NOTE(review): y is passed before x here; confirm this matches the
        // PolynomialRegression constructor's expected argument order.
        PolynomialRegression regression = new PolynomialRegression(y, x, settings.p);
        PolynomialApproximation approximation = new PolynomialApproximation(regression.getEstimatedCoefficients());
        if (LOG.isDebugging()) {
            msg.append("approximation ").append(approximation);
            LOG.debugFine(msg.toString());
        }
        return approximation;
    }
}
public class COSAPIClient { /** * Checks if container / object contains container / object / _ SUCCESS If so , this * object was created by successful Hadoop job * @ param objectKey * @ return boolean if job is successful */ private boolean isJobSuccessful ( String objectKey ) { } }
LOG . trace ( "isJobSuccessful: for {}" , objectKey ) ; if ( mCachedSparkJobsStatus . containsKey ( objectKey ) ) { LOG . trace ( "isJobSuccessful: {} found cached" , objectKey ) ; return mCachedSparkJobsStatus . get ( objectKey ) . booleanValue ( ) ; } String key = getRealKey ( objectKey ) ; Path p = new Path ( key , HADOOP_SUCCESS ) ; ObjectMetadata statusMetadata = getObjectMetadata ( p . toString ( ) ) ; Boolean isJobOK = Boolean . FALSE ; if ( statusMetadata != null ) { isJobOK = Boolean . TRUE ; } LOG . debug ( "isJobSuccessful: not cached {}. Status is {}" , objectKey , isJobOK ) ; mCachedSparkJobsStatus . put ( objectKey , isJobOK ) ; return isJobOK . booleanValue ( ) ;
public class KAFDocument { /** * Adds a linguistic processor to the document header . The timestamp is added implicitly . */ public LinguisticProcessor addLinguisticProcessor ( String layer , String name ) { } }
LinguisticProcessor lp = new LinguisticProcessor ( name , layer ) ; // lp . setBeginTimestamp ( timestamp ) ; / / no default timestamp List < LinguisticProcessor > layerLps = lps . get ( layer ) ; if ( layerLps == null ) { layerLps = new ArrayList < LinguisticProcessor > ( ) ; lps . put ( layer , layerLps ) ; } layerLps . add ( lp ) ; return lp ;
public class ServiceReader {
    /**
     * Make the request to the Twilio API to perform the read.
     *
     * @param client TwilioRestClient with which to make the request
     * @return Service ResourceSet seeded with the first page of results;
     *         subsequent pages are fetched lazily by the ResourceSet
     */
    @Override
    public ResourceSet<Service> read(final TwilioRestClient client) {
        return new ResourceSet<>(this, client, firstPage(client));
    }
}
public class SessionInfo {
    /**
     * Encode session information into the provided URL. This will replace
     * any existing session id already present in that URL.
     *
     * @param url  URL to rewrite; may contain a path, an existing session
     *             marker, a fragment and/or a query string
     * @param info session holder providing the session and its URL-rewriting
     *             configuration
     * @return the rewritten URL, or the input unchanged when no session exists
     */
    public static String encodeURL(String url, SessionInfo info) {
        // could be /path/page#fragment?query
        // could be /page/page;session=existing#fragment?query
        // where fragment and query are both optional
        HttpSession session = info.getSession();
        if (null == session) {
            // nothing to encode without a session
            return url;
        }
        final String id = session.getId();
        final String target = info.getSessionConfig().getURLRewritingMarker();
        URLParser parser = new URLParser(url, target);
        StringBuilder sb = new StringBuilder();
        if (-1 != parser.idMarker) {
            // a session exists in the URL, overlay this ID on top of it
            sb.append(url);
            int start = parser.idMarker + target.length();
            // NOTE(review): 23 is assumed to be the fixed session-id length;
            // TODO confirm against the session id generator.
            if (start + 23 < url.length()) {
                sb.replace(start, start + 23, id);
            } else {
                // invalid length on existing session, just remove that
                // TODO: what if a fragment or query string was after the
                // invalid session data
                sb.setLength(parser.idMarker);
                sb.append(target).append(id);
            }
        } else {
            // add session data to the URL
            if (-1 != parser.fragmentMarker) {
                // prepend it before the uri fragment
                sb.append(url, 0, parser.fragmentMarker);
                sb.append(target).append(id);
                sb.append(url, parser.fragmentMarker, url.length());
            } else if (-1 != parser.queryMarker) {
                // prepend it before the query data
                sb.append(url, 0, parser.queryMarker);
                sb.append(target).append(id);
                sb.append(url, parser.queryMarker, url.length());
            } else {
                // just a uri: append marker and id at the end
                sb.append(url).append(target).append(id);
            }
        }
        return sb.toString();
    }
}
public class DbRemoteConfigLoader { /** * 加载远程application . yml配置 */ @ Override public void loadRemoteConfig ( ) { } }
try { // 加载远程adapter配置 ConfigItem configItem = getRemoteAdapterConfig ( ) ; if ( configItem != null ) { if ( configItem . getModifiedTime ( ) != currentConfigTimestamp ) { currentConfigTimestamp = configItem . getModifiedTime ( ) ; overrideLocalCanalConfig ( configItem . getContent ( ) ) ; logger . info ( "## Loaded remote adapter config: application.yml" ) ; } } } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; }
public class GVRPose { /** * Set the world positions for the bones in this pose ( relative to skeleton root ) . * The world space positions for each bone are copied from the * source array of vectors in the order of their bone index . * The array must be as large as the number of bones in the skeleton * ( which can be obtained by calling { @ link # getNumBones } ) . * All bones in the skeleton start out at the origin oriented along the bone axis ( usually 0,0,1 ) . * The pose orients and positions each bone in the skeleton with respect to this initial state . * The world bone matrix expresses the orientation and position of the bone relative * to the root of the skeleton . This function sets the world space bone positions * from an array of vectors . The bone orientations are unaffected and it is up to the * caller to make sure these positions are compatible with the current pose rotations . * @ param positionsarray with the positions in world coordinates . * @ see # setWorldRotations * @ see # setWorldMatrix * @ see # getWorldPositions */ public void setWorldPositions ( float [ ] positions ) { } }
if ( positions . length != mBones . length * 3 ) { throw new IllegalArgumentException ( "Destination array is the wrong size" ) ; } mNeedSync = true ; for ( int i = 0 ; i < mBones . length ; ++ i ) { Bone bone = mBones [ i ] ; int t = i * 3 ; bone . setWorldPosition ( positions [ t ] , positions [ t + 1 ] , positions [ t + 2 ] ) ; bone . Changed = WORLD_POS ; if ( sDebug ) { Log . d ( "BONE" , "setWorldPosition: %s %s" , mSkeleton . getBoneName ( i ) , bone . toString ( ) ) ; } } sync ( ) ;
public class EbInterfaceValidator { /** * Create a new validation builder . * @ param aClass * The UBL class to be validated . May not be < code > null < / code > . * @ return The new validation builder . Never < code > null < / code > . * @ param < T > * The ebInterface document implementation type */ @ Nonnull public static < T > EbInterfaceValidator < T > create ( @ Nonnull final Class < T > aClass ) { } }
return new EbInterfaceValidator < > ( aClass ) ;
public class AccountAggregationSource { /** * The 12 - digit account ID of the account being aggregated . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAccountIds ( java . util . Collection ) } or { @ link # withAccountIds ( java . util . Collection ) } if you want to * override the existing values . * @ param accountIds * The 12 - digit account ID of the account being aggregated . * @ return Returns a reference to this object so that method calls can be chained together . */ public AccountAggregationSource withAccountIds ( String ... accountIds ) { } }
if ( this . accountIds == null ) { setAccountIds ( new com . amazonaws . internal . SdkInternalList < String > ( accountIds . length ) ) ; } for ( String ele : accountIds ) { this . accountIds . add ( ele ) ; } return this ;
public class PolicyInformation { /** * Write the PolicyInformation to the DerOutputStream . * @ param out the DerOutputStream to write the extension to . * @ exception IOException on encoding errors . */ public void encode ( DerOutputStream out ) throws IOException { } }
DerOutputStream tmp = new DerOutputStream ( ) ; policyIdentifier . encode ( tmp ) ; if ( ! policyQualifiers . isEmpty ( ) ) { DerOutputStream tmp2 = new DerOutputStream ( ) ; for ( PolicyQualifierInfo pq : policyQualifiers ) { tmp2 . write ( pq . getEncoded ( ) ) ; } tmp . write ( DerValue . tag_Sequence , tmp2 ) ; } out . write ( DerValue . tag_Sequence , tmp ) ;
public class SimpleStringSequence { /** * Returns the next generated value . * @ return the next generated value . * @ throws java . util . NoSuchElementException if the iteration has no more value . */ @ Override public String next ( ) { } }
if ( ! hasNext ( ) ) { throw new NoSuchElementException ( toString ( ) + " ended" ) ; } current = String . format ( format , generator . next ( ) ) ; return current ( ) ;
public class CommerceAddressRestrictionUtil {
    /**
     * Returns the first commerce address restriction in the ordered set where
     * classNameId = &#63; and classPK = &#63;.
     *
     * @param classNameId the class name ID
     * @param classPK the class pk
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce address restriction, or
     *         <code>null</code> if a matching commerce address restriction
     *         could not be found
     */
    public static CommerceAddressRestriction fetchByC_C_First(long classNameId, long classPK,
            OrderByComparator<CommerceAddressRestriction> orderByComparator) {
        // Thin static facade over the persistence bean; no extra logic here.
        return getPersistence().fetchByC_C_First(classNameId, classPK, orderByComparator);
    }
}
public class NetworkMonitor { /** * Begin monitoring changes in the device ' s network connection . * Before using this method , be sure to pass a { @ link NetworkConnectionListener } to * { @ link # NetworkMonitor ( Context , NetworkConnectionListener ) } . */ public void startMonitoringNetworkChanges ( ) { } }
if ( networkReceiver != null ) { IntentFilter filter = new IntentFilter ( ConnectivityManager . CONNECTIVITY_ACTION ) ; context . registerReceiver ( networkReceiver , filter ) ; } else { logger . warn ( "Cannot monitor device network changes. Make sure that a NetworkConnectionListener was passed to the NetworkMonitor constructor." ) ; }
public class ManagementResources { /** * Updates the admin privileges for the user . * @ param req The HTTP request . * @ param userName Name of the user whom the admin privileges will be updated . Cannot be null or empty . * @ param privileged boolean variable indicating admin privileges . * @ return Response object indicating whether the operation was successful or not . * @ throws IllegalArgumentException Throws IllegalArgument exception when the input is not valid . * @ throws WebApplicationException Throws this exception if the user does not exist or the user is not authorized to carry out this operation . */ @ PUT @ Path ( "/administratorprivilege" ) @ Produces ( MediaType . APPLICATION_JSON ) @ Description ( "Grants administrative privileges." ) public Response setAdministratorPrivilege ( @ Context HttpServletRequest req , @ FormParam ( "username" ) String userName , @ FormParam ( "privileged" ) boolean privileged ) { } }
if ( userName == null || userName . isEmpty ( ) ) { throw new IllegalArgumentException ( "User name cannot be null or empty." ) ; } validatePrivilegedUser ( req ) ; PrincipalUser user = userService . findUserByUsername ( userName ) ; if ( user == null ) { throw new WebApplicationException ( "User does not exist." , Status . NOT_FOUND ) ; } managementService . setAdministratorPrivilege ( user , privileged ) ; return Response . status ( Status . OK ) . build ( ) ;
public class MemcachedSessionService { /** * { @ inheritDoc } */ public MemcachedBackupSession createEmptySession ( ) { } }
final MemcachedBackupSession result = _manager . newMemcachedBackupSession ( ) ; result . setSticky ( _sticky ) ; return result ;
public class IntegerCLA {
    /**
     * {@inheritDoc}
     *
     * Parses the string with the shared FMT formatter and narrows the result
     * to an int.
     *
     * NOTE(review): the _caseSensitive and target parameters are ignored by
     * this implementation; numeric parsing has no case to honor.
     */
    @Override
    public Integer convert(final String valueStr, final boolean _caseSensitive, final Object target) throws ParseException {
        // FMT.parse throws ParseException on malformed input; intValue()
        // truncates the parsed Number to int range.
        return FMT.parse(valueStr).intValue();
    }
}
public class MethodAttribUtils {
    /**
     * F743-1752.1 added entire method.
     *
     * Determines the concurrency-management lock type for each business
     * method, filling in any slot of lockType not already set from
     * ejb-jar.xml with the value derived from @Lock annotations (method level
     * first, then the declaring class) or the default (WRITE).
     *
     * @param lockType   per-method lock types, parallel to ejbMethods; entries
     *                   may be pre-populated from ejb-jar.xml and null entries
     *                   are filled in place
     * @param ejbMethods the business methods to process
     * @param bmd        bean metadata, consulted for the metadata-complete flag
     */
    public static void getAnnotationCMCLockType(LockType[] lockType, Method[] ejbMethods, BeanMetaData bmd) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled()) {
            Tr.entry(tc, "getAnnotationCMCLockType: " + " methods = " + Arrays.toString(ejbMethods)); // F743-7027.1
        }
        LockType methodLockType = null;
        boolean metadataComplete = bmd.metadataComplete;
        // For each of the business methods, determine the lock type
        // value to use if not already set by the ejb-jar.xml processing.
        for (int i = 0; i < ejbMethods.length; i++) {
            Method beanMethod = ejbMethods[i];
            // Only use the value from annotations if it is not already set
            // from value in ejb-jar.xml (e.g. obtained via WCCM).
            methodLockType = lockType[i];
            if (methodLockType == null) {
                // ejb-jar.xml did not contain the lock type for this method.
                if (metadataComplete) {
                    // metadata-complete is true in ejb-jar.xml, so use the
                    // default value for LockType.
                    methodLockType = LockType.WRITE;
                } else {
                    // metadata-complete is false, so look for a @Lock
                    // annotation on this method.
                    Lock annotation = beanMethod.getAnnotation(Lock.class);
                    if (annotation == null) {
                        // No @Lock annotation on method; check class level of
                        // the class that declared this method.
                        Class<?> c = beanMethod.getDeclaringClass();
                        annotation = c.getAnnotation(Lock.class);
                        if (isTraceOn && tc.isDebugEnabled() && annotation != null) {
                            Tr.debug(tc, beanMethod.getName() + " from class " + c.getName());
                        }
                    }
                    // Did we find a @Lock annotation at method or class level?
                    if (annotation == null) {
                        // no @Lock annotation found, so use default lock type.
                        methodLockType = LockType.WRITE;
                    } else {
                        // there is a @Lock annotation, so use it.
                        methodLockType = annotation.value();
                    }
                } // end if (metadataComplete)
                // Set the lock type for this method as determined above.
                lockType[i] = methodLockType; // F743-1752CodRev
                if (isTraceOn && tc.isDebugEnabled()) {
                    Tr.debug(tc, beanMethod.getName() + " = " + methodLockType);
                }
            } // end if (methodLockType == null)
            else {
                // The ejb-jar.xml did specify a lock type for this method.
                // Check whether it overrides an explicit lock type specified
                // by the Bean Provider via annotation. The EJB spec says the
                // DD should not override a value explicitly provided by the
                // Bean Provider, but we cannot tell whether the DD came from
                // the Bean Provider or the application assembler. Even if it
                // was the assembler, the Bean Provider may be fixing an
                // application error via the DD rather than changing the
                // annotations. So we do nothing more than trace the fact
                // that it occurred.
                boolean traceEnabled = isTraceOn && (tcDebug.isDebugEnabled() || tc.isDebugEnabled());
                if (traceEnabled) {
                    // Is there an annotation for this method to compare against?
                    Lock annotation = beanMethod.getAnnotation(Lock.class);
                    if (annotation == null) {
                        // No; is one specified at declaring-class level?
                        Class<?> c = beanMethod.getDeclaringClass();
                        annotation = c.getAnnotation(Lock.class);
                    }
                    // If an annotated lock type was overridden by the DD with a
                    // different value, trace the change since it could be
                    // causing -- or fixing -- an application failure.
                    if (annotation != null && (annotation.value().equals(methodLockType) == false)) {
                        // Change in lock type: build a warning message and trace it.
                        String msg;
                        if (methodLockType == LockType.WRITE) {
                            msg = "ejb-jar.xml is overriding a @Lock(READ) with a write lock. This may cause a deadlock to occur.";
                        } else {
                            msg = "ejb-jar.xml is overriding a @Lock(WRITE) with a read lock. This may cause data integrity problems.";
                        }
                        StringBuilder sb = new StringBuilder();
                        sb.append("warning, for the ").append(beanMethod.toString()).append(" method, the ").append(msg);
                        if (tcDebug.isDebugEnabled()) {
                            Tr.debug(tcDebug, sb.toString());
                        } else {
                            Tr.debug(tc, sb.toString());
                        }
                    }
                }
            }
        } // end for
        if (isTraceOn && tc.isEntryEnabled()) {
            Tr.exit(tc, "getAnnotationCMCLockType", Arrays.toString(lockType));
        }
    }
}
public class Node {
    /**
     * Remove the subscription related to the specified JID. This will only
     * work if there is only 1 subscription. If there are multiple
     * subscriptions, use {@link #unsubscribe(String, String)}.
     *
     * @param jid the JID used to subscribe to the node
     * @throws XMPPErrorException
     * @throws NoResponseException
     * @throws NotConnectedException
     * @throws InterruptedException
     */
    public void unsubscribe(String jid) throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
        // Delegate with a null subscription id, valid only for a single subscription.
        unsubscribe(jid, null);
    }
}
public class SourceParams { /** * Create parameters necessary to create a SOFORT source . * @ param amount A positive integer in the smallest currency unit representing the amount to * charge the customer ( e . g . , 1099 for a € 10.99 payment ) . * @ param returnUrl The URL the customer should be redirected to after the authorization * process . * @ param country The ISO - 3166 2 - letter country code of the customer ’ s bank . * @ param statementDescriptor A custom statement descriptor for the payment ( optional ) . * @ return a { @ link SourceParams } object that can be used to create a SOFORT source * @ see < a href = " https : / / stripe . com / docs / sources / sofort " > https : / / stripe . com / docs / sources / sofort < / a > */ @ NonNull public static SourceParams createSofortParams ( @ IntRange ( from = 0 ) long amount , @ NonNull String returnUrl , @ NonNull @ Size ( 2 ) String country , @ Nullable String statementDescriptor ) { } }
final SourceParams params = new SourceParams ( ) . setType ( Source . SOFORT ) . setCurrency ( Source . EURO ) . setAmount ( amount ) . setRedirect ( createSimpleMap ( FIELD_RETURN_URL , returnUrl ) ) ; final Map < String , Object > sofortMap = createSimpleMap ( FIELD_COUNTRY , country ) ; if ( statementDescriptor != null ) { sofortMap . put ( FIELD_STATEMENT_DESCRIPTOR , statementDescriptor ) ; } return params . setApiParameterMap ( sofortMap ) ;
public class CGLIBDynamicProxyWeaving { /** * 方法调用 需要拦截方法在这里实现 。 目前实现arround intercept * @ param p _ proxy * Object * @ param m * Method * @ param args * Object [ ] * @ throws Throwable * @ return Object */ public Object invoke ( Object p_proxy , Method m , Object [ ] args ) throws Throwable { } }
Debug . logVerbose ( "<################################>Action: JdonFramework core entrance" , module ) ; Debug . logVerbose ( "[JdonFramework]<################>execute method=" + m . getDeclaringClass ( ) . getName ( ) + "." + m . getName ( ) , module ) ; Object result = null ; try { result = aopClient . invoke ( targetMetaRequest , m , args ) ; Debug . logVerbose ( "[JdonFramework]<################>finish executing method=" + m . getDeclaringClass ( ) . getName ( ) + "." + m . getName ( ) + " successfully!" , module ) ; Debug . logVerbose ( "<################################><end:" , module ) ; } catch ( Exception ex ) { Debug . logError ( ex , module ) ; } catch ( Throwable ex ) { throw new Throwable ( ex ) ; } return result ;
public class XmlPersonAttributeDao {
    /**
     * (non-Javadoc)
     * @see org.jasig.services.persondir.IPersonAttributeDao#getAvailableQueryAttributes()
     */
    @Override
    public Set<String> getAvailableQueryAttributes(final IPersonAttributeDaoFilter filter) {
        // The unmarshal call is made for its side effect: it (re)loads the XML
        // source and presumably refreshes attributesCache -- TODO confirm the
        // loader updates the cache rather than only returning the parsed object.
        this.jaxbLoader.getUnmarshalledObject(this.attributeLoader);
        // The filter parameter is not consulted; the full cached set is returned.
        return this.attributesCache;
    }
}
public class TopologyMetricContext {
    /**
     * Accumulates one histogram snapshot into another by summing every field
     * pairwise (min, max, all percentiles, mean, stddev), updating
     * metricSnapshot in place.
     *
     * NOTE(review): summing min/max/percentiles is not a statistically valid
     * histogram merge; this appears to be an intentional simple aggregation
     * ("sum histograms") -- confirm downstream consumers expect these
     * summed values.
     *
     * @param metricSnapshot the accumulator, modified in place
     * @param snapshot       the snapshot whose values are added in
     */
    public void sumMetricSnapshot(MetricSnapshot metricSnapshot, MetricSnapshot snapshot) {
        metricSnapshot.set_min(metricSnapshot.get_min() + snapshot.get_min());
        metricSnapshot.set_max(metricSnapshot.get_max() + snapshot.get_max());
        metricSnapshot.set_p50(metricSnapshot.get_p50() + snapshot.get_p50());
        metricSnapshot.set_p75(metricSnapshot.get_p75() + snapshot.get_p75());
        metricSnapshot.set_p95(metricSnapshot.get_p95() + snapshot.get_p95());
        metricSnapshot.set_p98(metricSnapshot.get_p98() + snapshot.get_p98());
        metricSnapshot.set_p99(metricSnapshot.get_p99() + snapshot.get_p99());
        metricSnapshot.set_p999(metricSnapshot.get_p999() + snapshot.get_p999());
        metricSnapshot.set_mean(metricSnapshot.get_mean() + snapshot.get_mean());
        metricSnapshot.set_stddev(metricSnapshot.get_stddev() + snapshot.get_stddev());
    }
}
public class M3UAManagementImpl { /** * Load and create LinkSets and Link from persisted file * @ throws Exception */ public void load ( ) throws FileNotFoundException { } }
XMLObjectReader reader = null ; try { this . preparePersistFile ( ) ; File f = new File ( persistFile . toString ( ) ) ; if ( f . exists ( ) ) { // we have V2 config loadVer2 ( persistFile . toString ( ) ) ; } else { String s1 = persistFile . toString ( ) . replace ( "1.xml" , ".xml" ) ; f = new File ( s1 ) ; if ( f . exists ( ) ) { loadVer1 ( s1 ) ; } this . store ( ) ; f . delete ( ) ; } } catch ( XMLStreamException ex ) { logger . error ( String . format ( "Failed to load the M3UA configuration file. \n%s" , ex . getMessage ( ) ) ) ; } catch ( FileNotFoundException e ) { logger . warn ( String . format ( "Failed to load the M3UA configuration file. \n%s" , e . getMessage ( ) ) ) ; } catch ( IOException e ) { logger . error ( String . format ( "Failed to load the M3UA configuration file. \n%s" , e . getMessage ( ) ) ) ; }
public class Result { /** * For BATCHEXECDIRECT */ public static Result newBatchedExecuteRequest ( ) { } }
Type [ ] types = new Type [ ] { Type . SQL_VARCHAR } ; Result result = newResult ( ResultConstants . BATCHEXECDIRECT ) ; result . metaData = ResultMetaData . newSimpleResultMetaData ( types ) ; return result ;
public class MicrochipPotentiometerBase { /** * Decreases the wiper ' s value for n steps . * @ param steps The number of steps to decrease * @ throws IOException Thrown if communication fails or device returned a malformed result */ @ Override public void decrease ( final int steps ) throws IOException { } }
if ( currentValue == 0 ) { return ; } if ( steps < 0 ) { throw new RuntimeException ( "Only positive values for parameter 'steps' allowed!" ) ; } if ( getNonVolatileMode ( ) != MicrochipPotentiometerNonVolatileMode . VOLATILE_ONLY ) { throw new RuntimeException ( "'decrease' is only valid for NonVolatileMode.VOLATILE_ONLY!" ) ; } // check boundaries final int actualSteps ; if ( steps > currentValue ) { actualSteps = currentValue ; } else { actualSteps = steps ; } int newValue = currentValue - actualSteps ; // if lower - boundary then set value in device to ensure sync // and for a large number of steps it is better to set a new value if ( ( newValue == 0 ) || ( steps > 5 ) ) { setCurrentValue ( newValue ) ; } // for a small number of steps use ' decrease ' - method else { controller . decrease ( DeviceControllerChannel . valueOf ( channel ) , actualSteps ) ; currentValue = newValue ; }
public class ICUHumanize { /** * Computes both past and future relative dates . * E . g . " 1 day ago " , " 1 day from now " , " 10 years ago " , " 3 minutes from now " * and so on . * @ param reference * Date to be used as reference * @ param duration * Date to be used as duration from reference * @ return String representing the relative date */ public static String naturalTime ( final Date reference , final Date duration ) { } }
long diff = duration . getTime ( ) - reference . getTime ( ) ; return context . get ( ) . getDurationFormat ( ) . formatDurationFrom ( diff , reference . getTime ( ) ) ;
public class Users { /** * Delete a user . * @ param userId * @ return Hash instance with API ' s response . */ public Hash deleteUser ( int userId ) throws AuthyException { } }
final Response response = this . post ( DELETE_USER_PATH + Integer . toString ( userId ) , null ) ; return instanceFromJson ( response . getStatus ( ) , response . getBody ( ) ) ;
public class PactDslJsonArray { /** * Element that must be an ISO formatted time */ public PactDslJsonArray time ( ) { } }
String pattern = DateFormatUtils . ISO_TIME_FORMAT . getPattern ( ) ; body . put ( DateFormatUtils . ISO_TIME_FORMAT . format ( new Date ( DATE_2000 ) ) ) ; generators . addGenerator ( Category . BODY , rootPath + appendArrayIndex ( 0 ) , new TimeGenerator ( pattern ) ) ; matchers . addRule ( rootPath + appendArrayIndex ( 0 ) , matchTime ( pattern ) ) ; return this ;