signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class SourceReportService { /** * Find the SourceReportModel instance for this fileModel ( this is a 1:1 relationship ) . */
public SourceReportModel getSourceReportForFileModel ( FileModel fileModel ) { } } | GraphTraversal < Vertex , Vertex > pipeline = new GraphTraversalSource ( getGraphContext ( ) . getGraph ( ) ) . V ( fileModel . getElement ( ) ) ; pipeline . in ( SourceReportModel . SOURCE_REPORT_TO_SOURCE_FILE_MODEL ) ; SourceReportModel result = null ; if ( pipeline . hasNext ( ) ) { result = frame ( pipeline . next ( ) ) ; } return result ; |
public class SiteConfigurationReader { /** * Reads the { @ value # SITE _ FILE } of the < code > templateFolder < / code > and merges with < code > templateFolder / controller < / code > ,
* if any exists .
* flow :
* < ol >
* < li > look in < code > templateFolder / controller < / code > < / li >
* < li > look in < code > templateFolder / layoutPath < / code > and merge ( if the layoutPath is not already in < code > templateFolder / controller < / code > ) < / li >
* < li > continue on to look in < code > templateFolder < / code > ( root ) and merge < / li >
* < / ol >
* @ param templateFolder
* The root folder to read from . The { @ value # SITE _ FILE } in this location is the default configuration file .
* Same as the index . html is the default layout .
* @ param controller
* The folder for the controller that is executed . This folder might hold an additional { @ value # SITE _ FILE } ,
* which will be used to merge with the default configuration file .
* This takes precedence over the values in default configuration file .
* @ param layoutPath
* A layout can be specified which is not at < code > templateFolder < / code > . < br >
* Might be an empty string if default layout is used . < br >
* Can be the same as controller if both are " index "
* @ param useCache
* Use caching of the read configuration
* @ return
* The read configuration .
* This might be the default configuration , or the merged configuration , or only the controller configuration
* if the controller configuration has { @ link SiteConfiguration # overrideDefault } set to override everything . */
public SiteConfiguration read ( String templateFolder , String controller , String layoutPath , boolean useCache ) { } } | Path rootFolder = Paths . get ( templateFolder ) ; // find eventual extra configurations in the controller folder
// we skip path . controller + ' / ' + path . method because we only look for other configurations on controller level
SiteConfiguration controllerConf = readSiteFileWithCache ( rootFolder . resolve ( controller ) , useCache ) ; if ( controllerConf . overrideDefault ) return controllerConf ; // Use the # SITE _ FILE near the index . html ( a . k . a . layout ) , if layout is NOT located within the controller path
// In this way we have the conf from controller AND near the layout
// ( which might need some information from the conf near it )
if ( ! layoutPath . isEmpty ( ) && ! isLayoutInControllerFolder ( controller , layoutPath ) ) { Path layoutFolder = rootFolder . resolve ( layoutPath ) ; SiteConfiguration controllerPlusLayoutConf = mergeSiteFilesWithCache ( layoutFolder , controller , controllerConf , useCache ) ; if ( controllerPlusLayoutConf . overrideDefault ) { return controllerPlusLayoutConf ; } controllerConf = controllerPlusLayoutConf ; controller = createPathIdentification ( layoutPath , controller ) ; } // Aaand we also look for it at the very root
return mergeSiteFilesWithCache ( rootFolder , controller , controllerConf , useCache ) ; |
public class Stack { /** * The actions that are enabled or disabled for users during their streaming sessions . By default these actions are
* enabled .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setUserSettings ( java . util . Collection ) } or { @ link # withUserSettings ( java . util . Collection ) } if you want to
* override the existing values .
* @ param userSettings
* The actions that are enabled or disabled for users during their streaming sessions . By default these
* actions are enabled .
* @ return Returns a reference to this object so that method calls can be chained together . */
public Stack withUserSettings ( UserSetting ... userSettings ) { } } | if ( this . userSettings == null ) { setUserSettings ( new java . util . ArrayList < UserSetting > ( userSettings . length ) ) ; } for ( UserSetting ele : userSettings ) { this . userSettings . add ( ele ) ; } return this ; |
public class JcrDocumentViewExporter { /** * Indicates whether the current node is an XML text node as per section 6.4.2.3 of the JCR 1.0 specification . XML text nodes
* are nodes that have the name & quot ; jcr : xmltext & quot ; and only one property ( besides the mandatory
* & quot ; jcr : primaryType & quot ; ) . The property must have a property name of & quot ; jcr : xmlcharacters & quot ; , a type of
* < code > String < / code > , and does not have multiple values .
* In practice , this is handled in ModeShape by making XML text nodes have a type of & quot ; dna : xmltext & quot ; , which enforces
* these property characteristics .
* @ param node the node to test
* @ return whether this node is a special xml text node
* @ throws RepositoryException if there is an error accessing the repository */
private boolean isXmlTextNode ( Node node ) throws RepositoryException { } } | // . / xmltext / xmlcharacters exception ( see JSR - 170 Spec 6.4.2.3)
if ( getPrefixedName ( JcrLexicon . XMLTEXT ) . equals ( node . getName ( ) ) ) { if ( node . getNodes ( ) . getSize ( ) == 0 ) { PropertyIterator properties = node . getProperties ( ) ; boolean xmlCharactersFound = false ; while ( properties . hasNext ( ) ) { Property property = properties . nextProperty ( ) ; if ( getPrefixedName ( JcrLexicon . PRIMARY_TYPE ) . equals ( property . getName ( ) ) ) { continue ; } if ( getPrefixedName ( JcrLexicon . XMLCHARACTERS ) . equals ( property . getName ( ) ) ) { xmlCharactersFound = true ; continue ; } // If the xmltext node has any properties other than primaryType or xmlcharacters , return false ;
return false ; } return xmlCharactersFound ; } } return false ; |
public class RamlHelper { /** * Returns authorization grant for provided action . It searches for
* authorization grants defined for provided action , some of parent
* resources or the root of the document . If authorization grants found is a
* list - the method will return the first grant in the list .
* @ param action
* action to find grant for
* @ param document
* root raml document
* @ return first grant found , null otherwise */
public static String getFirstAuthorizationGrant ( RamlAction action , RamlRoot document ) { } } | List < String > grants = getAuthorizationGrants ( action , document ) ; if ( grants . isEmpty ( ) ) { return null ; } return grants . get ( 0 ) ; |
public class TreeTraverser { /** * Returns an unmodifiable iterable over the nodes in a tree structure , using pre - order
 * traversal . That is , each node ' s subtrees are traversed after the node itself is returned .
 * < p > No guarantees are made about the behavior of the traversal when nodes change while
 * iteration is in progress or when the iterators generated by { @ link # children } are advanced .
 * @ param root the root node of the tree to traverse ; must not be null
 * @ return a lazy { @ code FluentIterable } whose iterator yields { @ code root } first , then its descendants */
// NOTE(review): only the null check is eager — preOrderIterator(root) is not invoked until
// iterator() is called on the returned iterable.
public final FluentIterable < T > preOrderTraversal ( final T root ) { } } | checkNotNull ( root ) ; return new FluentIterable < T > ( ) { @ Override public UnmodifiableIterator < T > iterator ( ) { return preOrderIterator ( root ) ; } } ; |
public class FlexiantComputeClient { /** * Loads the vdc with the given uuid .
 * @ param vdcUUID the uuid of the vdc .
 * @ return the vdc identified by the uuid or null .
 * @ throws FlexiantException if resolving the resource against the Flexiant API fails . */
// Thin delegation to the generic resource accessor, typed to VDC.
@ Nullable protected Vdc getVdc ( final String vdcUUID ) throws FlexiantException { } } | return this . getResource ( vdcUUID , ResourceType . VDC , Vdc . class ) ; |
public class DwgAttrib { /** * Read an Attrib in the DWG format Version 15
* @ param data Array of unsigned bytes obtained from the DWG binary file
* @ param offset The current bit offset where the value begins
* @ throws Exception If an unexpected bit value is found in the DWG file . Occurs
* when we are looking for LwPolylines . */
public void readDwgAttribV15 ( int [ ] data , int offset ) throws Exception { } } | // System . out . println ( " readDwgAttdef ( ) executed . . . " ) ;
int bitPos = offset ; bitPos = readObjectHeaderV15 ( data , bitPos ) ; Vector v = DwgUtil . getRawChar ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int dflag = ( ( Integer ) v . get ( 1 ) ) . intValue ( ) ; dataFlag = dflag ; if ( ( dflag & 0x1 ) == 0 ) { v = DwgUtil . getRawDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double elev = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; elevation = elev ; } v = DwgUtil . getRawDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double x1 = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; v = DwgUtil . getRawDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double y1 = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; insertionPoint = new Point2D . Double ( x1 , y1 ) ; double x = 0 , y = 0 , z = 0 ; if ( ( dflag & 0x2 ) == 0 ) { v = DwgUtil . getDefaultDouble ( data , bitPos , x1 ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; x = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; v = DwgUtil . getDefaultDouble ( data , bitPos , y1 ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; y = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; } alignmentPoint = new Point2D . Double ( x , y ) ; v = DwgUtil . testBit ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; boolean flag = ( ( Boolean ) v . get ( 1 ) ) . booleanValue ( ) ; if ( flag ) { y = 0.0 ; x = y ; z = 1.0 ; } else { v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; x = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; y = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; z = ( ( Double ) v . get ( 1 ) ) . 
doubleValue ( ) ; } extrusion = new double [ ] { x , y , z } ; v = DwgUtil . testBit ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; flag = ( ( Boolean ) v . get ( 1 ) ) . booleanValue ( ) ; double th ; if ( flag ) { th = 0.0 ; } else { v = DwgUtil . getBitDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; th = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; } thickness = th ; if ( ( dflag & 0x4 ) == 0 ) { v = DwgUtil . getRawDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double oblique = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; obliqueAngle = oblique ; } if ( ( dflag & 0x8 ) == 0 ) { v = DwgUtil . getRawDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double rot = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; rotationAngle = rot ; } v = DwgUtil . getRawDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double height = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; this . height = height ; if ( ( dflag & 0x10 ) == 0 ) { v = DwgUtil . getRawDouble ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; double width = ( ( Double ) v . get ( 1 ) ) . doubleValue ( ) ; widthFactor = width ; } v = DwgUtil . getTextString ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; String text = ( String ) v . get ( 1 ) ; this . text = text ; if ( ( dflag & 0x20 ) == 0 ) { v = DwgUtil . getBitShort ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int gen = ( ( Integer ) v . get ( 1 ) ) . intValue ( ) ; generation = gen ; } if ( ( dflag & 0x40 ) == 0 ) { v = DwgUtil . getBitShort ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int halign = ( ( Integer ) v . get ( 1 ) ) . intValue ( ) ; this . halign = halign ; } if ( ( dflag & 0x80 ) == 0 ) { v = DwgUtil . 
getBitShort ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int valign = ( ( Integer ) v . get ( 1 ) ) . intValue ( ) ; this . valign = valign ; } v = DwgUtil . getTextString ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; String tag = ( String ) v . get ( 1 ) ; this . tag = tag ; v = DwgUtil . getBitShort ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int fl = ( ( Integer ) v . get ( 1 ) ) . intValue ( ) ; fieldLength = fl ; v = DwgUtil . getRawChar ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int flags = ( ( Integer ) v . get ( 1 ) ) . intValue ( ) ; this . flags = flags ; bitPos = readObjectTailV15 ( data , bitPos ) ; v = DwgUtil . getHandle ( data , bitPos ) ; bitPos = ( ( Integer ) v . get ( 0 ) ) . intValue ( ) ; int [ ] handle = new int [ v . size ( ) - 1 ] ; for ( int j = 1 ; j < v . size ( ) ; j ++ ) { handle [ j - 1 ] = ( ( Integer ) v . get ( j ) ) . intValue ( ) ; } Vector handleVect = new Vector ( ) ; for ( int i = 0 ; i < handle . length ; i ++ ) { handleVect . add ( new Integer ( handle [ i ] ) ) ; } styleHandle = DwgUtil . handleBinToHandleInt ( handleVect ) ; |
public class ByteSequence { /** * Compares the two given byte sequences , byte by byte , returning a negative ,
* zero , or positive result if the first sequence is less than , equal to , or
* greater than the second . The comparison is performed starting with the
* first byte of each sequence , and proceeds until a pair of bytes differs ,
* or one sequence runs out of byte ( is shorter ) . A shorter sequence is
* considered less than a longer one .
* @ param bs1 first byte sequence to compare
* @ param bs2 second byte sequence to compare
* @ return comparison result */
public static int compareBytes ( ByteSequence bs1 , ByteSequence bs2 ) { } } | int minLen = Math . min ( bs1 . length ( ) , bs2 . length ( ) ) ; for ( int i = 0 ; i < minLen ; i ++ ) { int a = ( bs1 . byteAt ( i ) & 0xff ) ; int b = ( bs2 . byteAt ( i ) & 0xff ) ; if ( a != b ) { return a - b ; } } return bs1 . length ( ) - bs2 . length ( ) ; |
public class SliderLayout { /** * remove all the sliders . Notice : It ' s a not perfect method , a very small bug still exists . */
public void removeAllSliders ( ) { } } | if ( getRealAdapter ( ) != null ) { int count = getRealAdapter ( ) . getCount ( ) ; getRealAdapter ( ) . removeAllSliders ( ) ; // a small bug , but fixed by this trick .
// bug : when remove adapter ' s all the sliders . some caching slider still alive .
mViewPager . setCurrentItem ( mViewPager . getCurrentItem ( ) + count , false ) ; } |
public class ConditionalCommentNodeProcessorMatcher { /** * Matches the specified { @ link Node } if it is an instance of { @ link Comment }
* AND it conforms to { @ link ConditionalCommentUtils # isConditionalComment ( String ) } .
* @ param node the node to be checked
* @ param context the processor matching context
* @ return true if node is a conditional comment , false if not */
public boolean matches ( final Node node , final ProcessorMatchingContext context ) { } } | if ( node == null || ! ( node instanceof Comment ) ) { // fail fast
return false ; } final Comment comment = ( Comment ) node ; return ConditionalCommentUtils . isConditionalComment ( comment . getContent ( ) ) ; |
public class LinearClassifier { /** * Returns a counter mapping from each class name to the probability of
* that class for a certain example .
* Looking at the the sum of each count v , should be 1.0. */
private Counter < L > probabilityOfRVFDatum ( RVFDatum < L , F > example ) { } } | // NB : this duplicate method is needed so it calls the scoresOf method
// with a RVFDatum signature
Counter < L > scores = logProbabilityOfRVFDatum ( example ) ; for ( L label : scores . keySet ( ) ) { scores . setCount ( label , Math . exp ( scores . getCount ( label ) ) ) ; } return scores ; |
public class SpaceRepository { /** * Remove a remote space .
* @ param id identifier of the space
* @ param isLocalDestruction indicates if the destruction is initiated by the local kernel . */
protected void removeLocalSpaceDefinition ( SpaceID id , boolean isLocalDestruction ) { } } | final Space space ; synchronized ( getSpaceRepositoryMutex ( ) ) { space = this . spaces . remove ( id ) ; if ( space != null ) { this . spacesBySpec . remove ( id . getSpaceSpecification ( ) , id ) ; } } if ( space != null ) { fireSpaceRemoved ( space , isLocalDestruction ) ; } |
public class LoggingSnippets { /** * [ VARIABLE " my _ sink _ name " ] */
public boolean deleteSinkAsync ( String sinkName ) throws ExecutionException , InterruptedException { } } | // [ START deleteSinkAsync ]
Future < Boolean > future = logging . deleteSinkAsync ( sinkName ) ; boolean deleted = future . get ( ) ; if ( deleted ) { // the sink was deleted
} else { // the sink was not found
} // [ END deleteSinkAsync ]
return deleted ; |
public class PKCS9Attributes { /** * Decode this set of PKCS9 attributes from the contents of its
 * DER encoding . Ignores unsupported attributes when directed .
 * @ param in
 * the contents of the DER encoding of the attribute set .
 * @ return the caller - supplied DER encoding ( retagged as SET OF ) when it could be reused
 * verbatim , otherwise a freshly generated encoding without the skipped attributes .
 * @ exception IOException
 * on i / o error , encoding syntax error , unacceptable or
 * unsupported attribute , or duplicate attribute . */
private byte [ ] decode ( DerInputStream in ) throws IOException { } } | DerValue val = in . getDerValue ( ) ; // save the DER encoding with its proper tag byte .
// The first byte is forced to SET-OF: the caller may hand us an implicitly tagged encoding.
// getSet(3, true) then parses the (at least 3-element capacity) set in order.
byte [ ] derEncoding = val . toByteArray ( ) ; derEncoding [ 0 ] = DerValue . tag_SetOf ; DerInputStream derIn = new DerInputStream ( derEncoding ) ; DerValue [ ] derVals = derIn . getSet ( 3 , true ) ; PKCS9Attribute attrib ; ObjectIdentifier oid ; boolean reuseEncoding = true ; for ( int i = 0 ; i < derVals . length ; i ++ ) { try { attrib = new PKCS9Attribute ( derVals [ i ] ) ; } catch ( ParsingException e ) { if ( ignoreUnsupportedAttributes ) { reuseEncoding = false ; // cannot reuse supplied DER encoding
// Skipping an unparseable attribute means the stored bytes no longer match the parsed set,
// so the return value must be regenerated below.
continue ; // skip
} else { throw e ; } } oid = attrib . getOID ( ) ; if ( attributes . get ( oid ) != null ) throw new IOException ( "Duplicate PKCS9 attribute: " + oid ) ; if ( permittedAttributes != null && ! permittedAttributes . containsKey ( oid ) ) throw new IOException ( "Attribute " + oid + " not permitted in this attribute set" ) ; attributes . put ( oid , attrib ) ; } return reuseEncoding ? derEncoding : generateDerEncoding ( ) ; |
public class CloseableIterators { /** * Divides a closeableiterator into unmodifiable sublists of the given size ( the final
 * list may be smaller ) . For example , partitioning a closeableiterator containing
 * { @ code [ a , b , c , d , e ] } with a partition size of 3 yields { @ code
 * [ [ a , b , c ] , [ d , e ] ] } - - an outer iterator containing two inner lists of
 * three and two elements , all in the original order .
 * < p > The returned lists implement { @ link java . util . RandomAccess } . */
// NOTE(review): chunking is delegated to Guava's Iterators.partition; wrap() presumably
// propagates close() of the returned iterator to the source iterator — confirm against wrap().
public static < T > CloseableIterator < List < T > > partition ( final CloseableIterator < T > iterator , final int size ) { } } | return wrap ( Iterators . partition ( iterator , size ) , iterator ) ; |
public class ExtensionLoader { /** * Notifies that the properties ( e . g . name , description ) of the current session were changed .
* Should be called only by " core " classes .
* @ param session the session changed .
* @ since 2.7.0 */
public void sessionPropertiesChangedAllPlugin ( Session session ) { } } | logger . debug ( "sessionPropertiesChangedAllPlugin" ) ; for ( ExtensionHook hook : extensionHooks . values ( ) ) { for ( SessionChangedListener listener : hook . getSessionListenerList ( ) ) { try { if ( listener != null ) { listener . sessionPropertiesChanged ( session ) ; } } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } } } |
public class MpTransactionState { /** * Restart this fragment after the fragment is mis - routed from MigratePartitionLeader .
 * If the partition masters have been updated , the fragment is re - routed to its new master ;
 * otherwise it is re - sent to the old master until the new master is known .
 * @ param message The mis - routed response message
 * @ param masters The current list of partition masters used to refresh routing state
 * @ param partitionMastersMap The current partition masters */
public void restartFragment ( FragmentResponseMessage message , List < Long > masters , Map < Integer , Long > partitionMastersMap ) { } } | final int partionId = message . getPartitionId ( ) ; Long restartHsid = partitionMastersMap . get ( partionId ) ; Long hsid = message . getExecutorSiteId ( ) ; if ( ! hsid . equals ( restartHsid ) ) { m_masterMapForFragmentRestart . clear ( ) ; m_masterMapForFragmentRestart . put ( restartHsid , hsid ) ; // The very first fragment is to be rerouted to the new leader , then all the follow - up fragments are routed
// NOTE(review): restartHsid may still be null at this point (the partition may be absent from
// partitionMastersMap), in which case a null key is stored here before the fallback below — confirm intended.
// to new leaders .
updateMasters ( masters , partitionMastersMap ) ; } if ( restartHsid == null ) { restartHsid = hsid ; } if ( tmLog . isDebugEnabled ( ) ) { tmLog . debug ( "Rerouted fragment from " + CoreUtils . hsIdToString ( hsid ) + " to " + CoreUtils . hsIdToString ( restartHsid ) + "\n" + m_remoteWork ) ; } m_fragmentRestarted = true ; m_mbox . send ( restartHsid , m_remoteWork ) ; |
public class VoltCompiler { /** * Compile from a set of DDL files .
* @ param jarOutputPath The location to put the finished JAR to .
* @ param ddlFilePaths The array of DDL files to compile ( at least one is required ) .
* @ return true if successful
* @ throws VoltCompilerException */
public boolean compileFromDDL ( final String jarOutputPath , final String ... ddlFilePaths ) { } } | if ( ddlFilePaths . length == 0 ) { compilerLog . error ( "At least one DDL file is required." ) ; return false ; } List < VoltCompilerReader > ddlReaderList ; try { ddlReaderList = DDLPathsToReaderList ( ddlFilePaths ) ; } catch ( VoltCompilerException e ) { compilerLog . error ( "Unable to open DDL file." , e ) ; return false ; } return compileInternalToFile ( jarOutputPath , null , null , ddlReaderList , null ) ; |
public class BigFloat { /** * Returns the the maximum of two { @ link BigFloat } values .
* @ param value1 the first { @ link BigFloat } value to compare
* @ param value2 the second { @ link BigFloat } value to compare
* @ return the maximum { @ link BigFloat } value */
public static BigFloat max ( BigFloat value1 , BigFloat value2 ) { } } | return value1 . compareTo ( value2 ) >= 0 ? value1 : value2 ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * @ generated */
// NOTE(review): EMF-generated accessor — lazily resolves the enum from the global package
// registry on first use; classifier index 1054 is fixed by the generator. Do not hand-edit.
@ Override public EEnum getIfcRoleEnum ( ) { } } | if ( ifcRoleEnumEEnum == null ) { ifcRoleEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 1054 ) ; } return ifcRoleEnumEEnum ; |
public class SQLiteDatabase { /** * Runs the provided SQL and returns a cursor over the result set .
* @ param cursorFactory the cursor factory to use , or null for the default factory
* @ param sql the SQL query . The SQL string must not be ; terminated
* @ param selectionArgs You may include ? s in where clause in the query ,
* which will be replaced by the values from selectionArgs . The
* values will be bound as Strings .
* @ param editTable the name of the first table , which is editable
* @ param cancellationSignal A signal to cancel the operation in progress , or null if none .
* If the operation is canceled , then { @ link OperationCanceledException } will be thrown
* when the query is executed .
* @ return A { @ link Cursor } object , which is positioned before the first entry . Note that
* { @ link Cursor } s are not synchronized , see the documentation for more details . */
public Cursor rawQueryWithFactory ( CursorFactory cursorFactory , String sql , String [ ] selectionArgs , String editTable , CancellationSignal cancellationSignal ) { } } | acquireReference ( ) ; try { SQLiteCursorDriver driver = new SQLiteDirectCursorDriver ( this , sql , editTable , cancellationSignal ) ; return driver . query ( cursorFactory != null ? cursorFactory : mCursorFactory , selectionArgs ) ; } finally { releaseReference ( ) ; } |
public class ProtocolUtils { /** * Formats a message opcode for logs and error messages .
* < p > Note that the reason why we don ' t use enums is because the driver can be extended with
* custom opcodes . */
public static String opcodeString ( int opcode ) { } } | switch ( opcode ) { case ProtocolConstants . Opcode . ERROR : return "ERROR" ; case ProtocolConstants . Opcode . STARTUP : return "STARTUP" ; case ProtocolConstants . Opcode . READY : return "READY" ; case ProtocolConstants . Opcode . AUTHENTICATE : return "AUTHENTICATE" ; case ProtocolConstants . Opcode . OPTIONS : return "OPTIONS" ; case ProtocolConstants . Opcode . SUPPORTED : return "SUPPORTED" ; case ProtocolConstants . Opcode . QUERY : return "QUERY" ; case ProtocolConstants . Opcode . RESULT : return "RESULT" ; case ProtocolConstants . Opcode . PREPARE : return "PREPARE" ; case ProtocolConstants . Opcode . EXECUTE : return "EXECUTE" ; case ProtocolConstants . Opcode . REGISTER : return "REGISTER" ; case ProtocolConstants . Opcode . EVENT : return "EVENT" ; case ProtocolConstants . Opcode . BATCH : return "BATCH" ; case ProtocolConstants . Opcode . AUTH_CHALLENGE : return "AUTH_CHALLENGE" ; case ProtocolConstants . Opcode . AUTH_RESPONSE : return "AUTH_RESPONSE" ; case ProtocolConstants . Opcode . AUTH_SUCCESS : return "AUTH_SUCCESS" ; default : return "0x" + Integer . toHexString ( opcode ) ; } |
public class CoordinatorProxyService { /** * Create a @ SocketStoreClientFactory from the given configPops
* @ param bootstrapURLs
* @ param configProps
* @ return */
private SocketStoreClientFactory getFatClientFactory ( String [ ] bootstrapURLs , Properties configProps ) { } } | ClientConfig fatClientConfig = new ClientConfig ( configProps ) ; logger . info ( "Using config: " + fatClientConfig ) ; fatClientConfig . setBootstrapUrls ( bootstrapURLs ) . setEnableCompressionLayer ( false ) . setEnableSerializationLayer ( false ) . enableDefaultClient ( true ) . setEnableLazy ( false ) ; return new SocketStoreClientFactory ( fatClientConfig ) ; |
public class TagLibTag { /** * Setzt die Information , was fuer ein BodyContent das Tag haben kann . Diese Methode wird durch die
* Klasse TagLibFactory verwendet .
* @ param value BodyContent Information . */
public void setBodyContent ( String value ) { } } | // empty , free , must , tagdependent
value = value . toLowerCase ( ) . trim ( ) ; // if ( value . equals ( " jsp " ) ) value = " free " ;
this . hasBody = ! value . equals ( "empty" ) ; this . isBodyReq = ! value . equals ( "free" ) ; this . isTagDependent = value . equals ( "tagdependent" ) ; bodyFree = value . equals ( "free" ) ; |
public class Conference { /** * Retrieves the conference information .
 * @ param client the client
 * @ param id the conference id .
 * @ return the { @ code Conference } populated from the REST response .
 * @ throws Exception if the REST call fails or the response cannot be parsed . */
public static Conference getConference ( final BandwidthClient client , final String id ) throws Exception { } } | final String conferencesUri = client . getUserResourceUri ( BandwidthConstants . CONFERENCES_URI_PATH ) ; final String conferenceUri = StringUtils . join ( new String [ ] { conferencesUri , id } , '/' ) ; final JSONObject jsonObject = toJSONObject ( client . get ( conferenceUri , null ) ) ; return new Conference ( client , jsonObject ) ; |
public class IfcDocumentInformationImpl { /** * < ! - - begin - user - doc - - >
 * < ! - - end - user - doc - - >
 * @ generated */
// NOTE(review): EMF-generated feature accessor — delegates to eGet with resolve=true. Do not hand-edit.
@ SuppressWarnings ( "unchecked" ) public EList < IfcDocumentInformationRelationship > getIsPointer ( ) { } } | return ( EList < IfcDocumentInformationRelationship > ) eGet ( Ifc2x3tc1Package . Literals . IFC_DOCUMENT_INFORMATION__IS_POINTER , true ) ; |
public class QueryDataUtils { /** * Exports single query page into CSV rows
* @ param replierExportStrategy replier export strategy
* @ param replies replies to be exported
* @ param stamp stamp
* @ param queryPage query page to be exported
* @ return CSV rows */
private static List < String [ ] > exportQueryPageCommentsAsCsv ( ReplierExportStrategy replierExportStrategy , List < QueryReply > replies , PanelStamp stamp , QueryPage queryPage ) { } } | QueryPageHandler queryPageHandler = QueryPageHandlerFactory . getInstance ( ) . buildPageHandler ( queryPage . getPageType ( ) ) ; if ( queryPageHandler != null ) { ReportPageCommentProcessor processor = queryPageHandler . exportComments ( queryPage , stamp , replies ) ; if ( processor != null ) { return exportQueryPageCommentsAsCsv ( replierExportStrategy , queryPage , processor ) ; } } return Collections . emptyList ( ) ; |
public class ResourceGroupsInner { /** * Creates a resource group .
* @ param resourceGroupName The name of the resource group to create or update .
* @ param parameters Parameters supplied to the create or update a resource group .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ResourceGroupInner object */
public Observable < ResourceGroupInner > createOrUpdateAsync ( String resourceGroupName , ResourceGroupInner parameters ) { } } | return createOrUpdateWithServiceResponseAsync ( resourceGroupName , parameters ) . map ( new Func1 < ServiceResponse < ResourceGroupInner > , ResourceGroupInner > ( ) { @ Override public ResourceGroupInner call ( ServiceResponse < ResourceGroupInner > response ) { return response . body ( ) ; } } ) ; |
public class TrafficLight2 { /** * Enables / disables the green light
* @ param GREEN _ ON */
public void setGreenOn ( final boolean GREEN_ON ) { } } | boolean oldGreenOn = greenOn ; greenOn = GREEN_ON ; propertySupport . firePropertyChange ( GREEN_PROPERTY , oldGreenOn , greenOn ) ; repaint ( getInnerBounds ( ) ) ; |
public class ThrottledApiHandler { /** * Get a list of all summoner spells as Java Collection
 * This method does not count towards the rate limit and is not affected by the throttle
 * @ param data Additional information to retrieve
 * @ return The summoner spells ( the underlying call runs synchronously here , so the
 * returned future is presumably already completed — confirm against DummyFuture ) .
 * @ see < a href = https : / / developer . riotgames . com / api / methods # ! / 649/2174 > Official API documentation < / a > */
public Future < Collection < SummonerSpell > > getSummonerSpells ( SpellData data ) { } } | return new DummyFuture < > ( handler . getSummonerSpells ( data ) ) ; |
public class CmsNewResourceTypeDialog { /** * Locks the given resource temporarily . < p >
* @ param resource the resource to lock
* @ throws CmsException if locking fails */
private void lockTemporary ( CmsResource resource ) throws CmsException { } } | CmsUser user = m_cms . getRequestContext ( ) . getCurrentUser ( ) ; CmsLock lock = m_cms . getLock ( resource ) ; if ( ! lock . isOwnedBy ( user ) ) { m_cms . lockResourceTemporary ( resource ) ; } else if ( ! lock . isOwnedInProjectBy ( user , m_cms . getRequestContext ( ) . getCurrentProject ( ) ) ) { m_cms . changeLock ( resource ) ; } |
public class DfuServiceListenerHelper { /** * Registers the { @ link DfuProgressListener } .
* Registered listener will receive the progress events from the DFU service .
* @ param context the application context .
* @ param listener the listener to register . */
public static void registerProgressListener ( @ NonNull final Context context , @ NonNull final DfuProgressListener listener ) { } } | if ( mProgressBroadcastReceiver == null ) { mProgressBroadcastReceiver = new ProgressBroadcastsReceiver ( ) ; final IntentFilter filter = new IntentFilter ( ) ; filter . addAction ( DfuBaseService . BROADCAST_PROGRESS ) ; filter . addAction ( DfuBaseService . BROADCAST_ERROR ) ; LocalBroadcastManager . getInstance ( context ) . registerReceiver ( mProgressBroadcastReceiver , filter ) ; } mProgressBroadcastReceiver . setProgressListener ( listener ) ; |
public class DepthFirstSearch { /** * beginning with startNode add all following nodes to LIFO queue . If node has been already
* explored before , skip reexploration . */
@ Override public void start ( EdgeExplorer explorer , int startNode ) { } } | IntArrayDeque stack = new IntArrayDeque ( ) ; GHBitSet explored = createBitSet ( ) ; stack . addLast ( startNode ) ; int current ; while ( stack . size ( ) > 0 ) { current = stack . removeLast ( ) ; if ( ! explored . contains ( current ) && goFurther ( current ) ) { EdgeIterator iter = explorer . setBaseNode ( current ) ; while ( iter . next ( ) ) { int connectedId = iter . getAdjNode ( ) ; if ( checkAdjacent ( iter ) ) { stack . addLast ( connectedId ) ; } } explored . add ( current ) ; } } |
// NOTE(review): copy-on-write — the suppression sets below are cloned only when this node
// actually adds new suppressions, so the parent's sets are shared untouched in the common case
// and can be restored when the scanner moves back up the tree.
public class SuppressionInfo { /** * Returns an instance of { @ code SuppressionInfo } that takes into account any suppression signals
* present on { @ code sym } as well as those already stored in { @ code this } .
* < p > Checks suppressions for any { @ code @ SuppressWarnings } , Android ' s { @ code SuppressLint } , and
* custom suppression annotations described by { @ code customSuppressionAnnosToLookFor } .
* < p > We do not modify the existing suppression sets , so they can be restored when moving up the
* tree . We also avoid copying the suppression sets if the next node to explore does not have any
* suppressed warnings or custom suppression annotations . This is the common case .
* @ param sym The { @ code Symbol } for the AST node currently being scanned
* @ param state VisitorState for checking the current tree , as well as for getting the { @ code
* SuppressWarnings symbol type } . */
public SuppressionInfo withExtendedSuppressions ( Symbol sym , VisitorState state , Set < Class < ? extends Annotation > > customSuppressionAnnosToLookFor ) { } } | boolean newInGeneratedCode = inGeneratedCode || isGenerated ( sym , state ) ; boolean anyModification = newInGeneratedCode != inGeneratedCode ; /* Handle custom suppression annotations . */
Set < Class < ? extends Annotation > > newCustomSuppressions = null ; for ( Class < ? extends Annotation > annotationType : customSuppressionAnnosToLookFor ) { // Don ' t need to check already - suppressed annos
if ( customSuppressions . contains ( annotationType ) ) { continue ; } if ( ASTHelpers . hasAnnotation ( sym , annotationType , state ) ) { anyModification = true ; if ( newCustomSuppressions == null ) { newCustomSuppressions = new HashSet < > ( customSuppressions ) ; } newCustomSuppressions . add ( annotationType ) ; } } /* Handle { @ code @ SuppressWarnings } and { @ code @ SuppressLint } . */
// NOTE(review): SuppressLint is matched by fully-qualified name string — presumably because the
// Android annotation is not on the compiler classpath; TODO confirm.
Set < String > newSuppressions = null ; // Iterate over annotations on this symbol , looking for SuppressWarnings
for ( Attribute . Compound attr : sym . getAnnotationMirrors ( ) ) { if ( ( attr . type . tsym == state . getSymtab ( ) . suppressWarningsType . tsym ) || attr . type . tsym . getQualifiedName ( ) . contentEquals ( "android.annotation.SuppressLint" ) ) { for ( Pair < MethodSymbol , Attribute > value : attr . values ) { if ( value . fst . name . contentEquals ( "value" ) ) { if ( value . snd instanceof Attribute . Array ) { // SuppressWarnings / SuppressLint take an array
for ( Attribute suppress : ( ( Attribute . Array ) value . snd ) . values ) { String suppressedWarning = ( String ) suppress . getValue ( ) ; if ( ! suppressWarningsStrings . contains ( suppressedWarning ) ) { anyModification = true ; if ( newSuppressions == null ) { newSuppressions = new HashSet < > ( suppressWarningsStrings ) ; } newSuppressions . add ( suppressedWarning ) ; } } } else { throw new RuntimeException ( "Expected SuppressWarnings/SuppressLint annotation to take array type" ) ; } } } } } // Since this is invoked every time we descend into a new node , let ' s save some garbage
// by returning the same instance if there were no changes .
if ( ! anyModification ) { return this ; } if ( newCustomSuppressions == null ) { newCustomSuppressions = customSuppressions ; } if ( newSuppressions == null ) { newSuppressions = suppressWarningsStrings ; } return new SuppressionInfo ( newSuppressions , newCustomSuppressions , newInGeneratedCode ) ;
// NOTE(review): the pipeline below (loadAllKeys -> toData -> limit -> toPartition -> toBatches)
// is lazy — keys are pulled from the map loader only as batches are sent. The waitForever() call
// near the end is an ordering barrier (see inline comment); do not reorder it relative to
// sendKeyLoadCompleted() in the finally block.
public class MapKeyLoader { /** * Loads keys from the map loader and sends them to the partition owners in batches
* for value loading . This method will return after all keys have been dispatched
* to the partition owners for value loading and all partitions have been notified
* that the key loading has completed .
* The values will still be loaded asynchronously and can be put into the record
* stores after this method has returned .
* If there is a configured max size policy per node , the keys will be loaded until this
* many keys have been loaded from the map loader . If the keys returned from the
* map loader are not equally distributed over all partitions , this may cause some nodes
* to load more entries than others and exceed the configured policy .
* @ param mapStoreContext the map store context for this map
* @ param replaceExistingValues if the existing entries for the loaded keys should be replaced
* @ throws Exception if there was an exception when notifying the record stores that the key
* loading has finished
* @ see MapLoader # loadAllKeys ( ) */
private void sendKeysInBatches ( MapStoreContext mapStoreContext , boolean replaceExistingValues ) throws Exception { } } | if ( logger . isFinestEnabled ( ) ) { logger . finest ( "sendKeysInBatches invoked " + getStateMessage ( ) ) ; } int clusterSize = partitionService . getMemberPartitionsMap ( ) . size ( ) ; Iterator < Object > keys = null ; Throwable loadError = null ; try { Iterable < Object > allKeys = mapStoreContext . loadAllKeys ( ) ; keys = allKeys . iterator ( ) ; Iterator < Data > dataKeys = map ( keys , toData ) ; int mapMaxSize = clusterSize * maxSizePerNode ; if ( mapMaxSize > 0 ) { dataKeys = limit ( dataKeys , mapMaxSize ) ; } Iterator < Entry < Integer , Data > > partitionsAndKeys = map ( dataKeys , toPartition ( partitionService ) ) ; Iterator < Map < Integer , List < Data > > > batches = toBatches ( partitionsAndKeys , maxBatch ) ; List < Future > futures = new ArrayList < > ( ) ; while ( batches . hasNext ( ) ) { Map < Integer , List < Data > > batch = batches . next ( ) ; futures . addAll ( sendBatch ( batch , replaceExistingValues ) ) ; } // This acts as a barrier to prevent re - ordering of key distribution operations ( LoadAllOperation )
// and LoadStatusOperation ( s ) which indicates all keys were already loaded .
// Re - ordering of in - flight operations can happen during a partition migration . We are waiting here
// for all LoadAllOperation ( s ) to be ACKed by receivers and only then we send them the LoadStatusOperation
// See https : / / github . com / hazelcast / hazelcast / issues / 4024 for additional details
FutureUtil . waitForever ( futures ) ; } catch ( Exception caught ) { loadError = caught ; } finally { sendKeyLoadCompleted ( clusterSize , loadError ) ; if ( keys instanceof Closeable ) { closeResource ( ( Closeable ) keys ) ; } }
public class VasEventHandler { /** * Converts the opening times from the { @ link io . motown . domain . api . chargingstation . OpeningTime } to the { @ link io . motown . vas . viewmodel . persistence . entities . OpeningTime } format .
* @ param input the opening times from the core .
* @ return the new set of opening times . */
private Set < io . motown . vas . viewmodel . persistence . entities . OpeningTime > convertFromApiOpeningTimes ( Set < OpeningTime > input ) { } } | Set < io . motown . vas . viewmodel . persistence . entities . OpeningTime > output = new HashSet < > ( ) ; for ( OpeningTime source : input ) { io . motown . vas . viewmodel . persistence . entities . OpeningTime openingTime = new io . motown . vas . viewmodel . persistence . entities . OpeningTime ( ) ; openingTime . setDay ( Day . fromValue ( source . getDay ( ) . value ( ) ) ) ; openingTime . setTimeStart ( source . getTimeStart ( ) . getHourOfDay ( ) * MINUTES_IN_HOUR + source . getTimeStart ( ) . getMinutesInHour ( ) ) ; openingTime . setTimeStop ( source . getTimeStop ( ) . getHourOfDay ( ) * MINUTES_IN_HOUR + source . getTimeStop ( ) . getMinutesInHour ( ) ) ; output . add ( openingTime ) ; } return output ; |
public class JtaProcessEngineConfiguration { /** * provide custom command executor that uses NON - JTA transactions */
@ Override protected void initCommandExecutorDbSchemaOperations ( ) { } } | if ( commandExecutorSchemaOperations == null ) { List < CommandInterceptor > commandInterceptorsDbSchemaOperations = new ArrayList < CommandInterceptor > ( ) ; commandInterceptorsDbSchemaOperations . add ( new LogInterceptor ( ) ) ; commandInterceptorsDbSchemaOperations . add ( new CommandContextInterceptor ( dbSchemaOperationsCommandContextFactory , this ) ) ; commandInterceptorsDbSchemaOperations . add ( actualCommandExecutor ) ; commandExecutorSchemaOperations = initInterceptorChain ( commandInterceptorsDbSchemaOperations ) ; } |
public class ThreadIdentityManager { /** * Set the server ' s identity as the thread identity .
* @ return A token representing the identity previously on the thread .
* This token must be passed to the subsequent reset call . */
public static Object runAsServer ( ) { } } | LinkedHashMap < ThreadIdentityService , Object > token = null ; if ( ! checkForRecursionAndSet ( ) ) { try { for ( int i = 0 , size = threadIdentityServices . size ( ) ; i < size ; ++ i ) { ThreadIdentityService tis = threadIdentityServices . get ( i ) ; if ( tis . isAppThreadIdentityEnabled ( ) ) { if ( token == null ) { token = new LinkedHashMap < ThreadIdentityService , Object > ( ) ; } token . put ( tis , tis . runAsServer ( ) ) ; } } } finally { resetRecursionCheck ( ) ; } } return token == null ? Collections . EMPTY_MAP : token ; |
public class FSABuilder { /** * Return < code > true < / code > if two regions in { @ link # serialized } are
* identical . */
private boolean equivalent ( int start1 , int start2 , int len ) { } } | if ( start1 + len > size || start2 + len > size ) return false ; while ( len -- > 0 ) if ( serialized [ start1 ++ ] != serialized [ start2 ++ ] ) return false ; return true ; |
public class AbstractIncrementalDFADAGBuilder { /** * Returns the canonical state for the given state ' s signature , or registers the state as canonical if no state with
* that signature exists .
* @ param state
* the state
* @ return the canonical state for the given state ' s signature */
protected State replaceOrRegister ( State state ) { } } | StateSignature sig = state . getSignature ( ) ; State other = register . get ( sig ) ; if ( other != null ) { if ( state != other ) { for ( int i = 0 ; i < sig . successors . array . length ; i ++ ) { State succ = sig . successors . array [ i ] ; if ( succ != null ) { succ . decreaseIncoming ( ) ; } } } return other ; } register . put ( sig , state ) ; return state ; |
public class SPropertyOrQuery { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > select the property to be matched by the lookup < / i > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > e . g . START . node ( n ) . byIndex ( " Person " ) . < b > property ( " name " ) < / b > < / i > < / div >
* < br / > */
public SPropertyValue property ( String name ) { } } | StartExpression sx = ( StartExpression ) this . astNode ; sx . setPropertyOrQuery ( new PropertyOrQuery ( name , null ) ) ; SPropertyValue ret = new SPropertyValue ( sx ) ; return ret ; |
// NOTE(review): state machine over the affix pattern — 'i' indexes the pattern, 'pos' tracks
// the match position in 'text' and becomes -1 on any mismatch (the loop condition then exits).
// Quoted literals, currency signs (single/intl/plural), and the percent/permille/plus/minus
// pattern characters each have their own matching path.
public class DecimalFormat { /** * Returns the length matched by the given affix , or - 1 if none .
* @ param affixPat pattern string
* @ param text input text
* @ param pos offset into input at which to begin matching
* @ param type parse against currency type , LONG _ NAME only or not .
* @ param currency return value for parsed currency , for generic
* currency parsing mode , or null for normal parsing . In generic
* currency parsing mode , any currency is parsed , not just the
* currency that this formatter is set to .
* @ return position after the matched text , or - 1 if match failure */
private int compareComplexAffix ( String affixPat , String text , int pos , int type , Currency [ ] currency ) { } } | int start = pos ; for ( int i = 0 ; i < affixPat . length ( ) && pos >= 0 ; ) { char c = affixPat . charAt ( i ++ ) ; if ( c == QUOTE ) { for ( ; ; ) { int j = affixPat . indexOf ( QUOTE , i ) ; if ( j == i ) { pos = match ( text , pos , QUOTE ) ; i = j + 1 ; break ; } else if ( j > i ) { pos = match ( text , pos , affixPat . substring ( i , j ) ) ; i = j + 1 ; if ( i < affixPat . length ( ) && affixPat . charAt ( i ) == QUOTE ) { pos = match ( text , pos , QUOTE ) ; ++ i ; // loop again
} else { break ; } } else { // Unterminated quote ; should be caught by apply
// pattern .
throw new RuntimeException ( ) ; } } continue ; } String affix = null ; switch ( c ) { case CURRENCY_SIGN : // since the currency names in choice format is saved the same way as
// other currency names , do not need to do currency choice parsing here .
// the general currency parsing parse against all names , including names
// in choice format . assert ( currency ! = null | | ( getCurrency ( ) ! = null & &
// currencyChoice ! = null ) ) ;
boolean intl = i < affixPat . length ( ) && affixPat . charAt ( i ) == CURRENCY_SIGN ; if ( intl ) { ++ i ; } boolean plural = i < affixPat . length ( ) && affixPat . charAt ( i ) == CURRENCY_SIGN ; if ( plural ) { ++ i ; intl = false ; } // Parse generic currency - - anything for which we have a display name , or
// any 3 - letter ISO code . Try to parse display name for our locale ; first
// determine our locale . TODO : use locale in CurrencyPluralInfo
ULocale uloc = getLocale ( ULocale . VALID_LOCALE ) ; if ( uloc == null ) { // applyPattern has been called ; use the symbols
uloc = symbols . getLocale ( ULocale . VALID_LOCALE ) ; } // Delegate parse of display name = > ISO code to Currency
ParsePosition ppos = new ParsePosition ( pos ) ; // using Currency . parse to handle mixed style parsing .
String iso = Currency . parse ( uloc , text , type , ppos ) ; // If parse succeeds , populate currency [ 0]
if ( iso != null ) { if ( currency != null ) { currency [ 0 ] = Currency . getInstance ( iso ) ; } else { // The formatter is currency - style but the client has not requested
// the value of the parsed currency . In this case , if that value does
// not match the formatter ' s current value , then the parse fails .
Currency effectiveCurr = getEffectiveCurrency ( ) ; if ( iso . compareTo ( effectiveCurr . getCurrencyCode ( ) ) != 0 ) { pos = - 1 ; continue ; } } pos = ppos . getIndex ( ) ; } else { pos = - 1 ; } continue ; case PATTERN_PERCENT : affix = symbols . getPercentString ( ) ; break ; case PATTERN_PER_MILLE : affix = symbols . getPerMillString ( ) ; break ; case PATTERN_PLUS_SIGN : affix = symbols . getPlusSignString ( ) ; break ; case PATTERN_MINUS_SIGN : affix = symbols . getMinusSignString ( ) ; break ; default : // fall through to affix ! = null test , which will fail
break ; } if ( affix != null ) { pos = match ( text , pos , affix ) ; continue ; } pos = match ( text , pos , c ) ; if ( PatternProps . isWhiteSpace ( c ) ) { i = skipPatternWhiteSpace ( affixPat , i ) ; } } return pos - start ;
public class Flash { /** * Adds a value with a specific key to the flash overwriting an
* existing value
* @ param key The key
* @ param value The value */
public void put ( String key , String value ) { } } | if ( validCharacters ( key ) && validCharacters ( value ) ) { this . values . put ( key , value ) ; } |
public class MergedNsContext { /** * Method called by the matching start element class to
* output all namespace declarations active in current namespace
* scope , if any . */
@ Override public void outputNamespaceDeclarations ( XMLStreamWriter w ) throws XMLStreamException { } } | for ( int i = 0 , len = mNamespaces . size ( ) ; i < len ; ++ i ) { Namespace ns = mNamespaces . get ( i ) ; if ( ns . isDefaultNamespaceDeclaration ( ) ) { w . writeDefaultNamespace ( ns . getNamespaceURI ( ) ) ; } else { w . writeNamespace ( ns . getPrefix ( ) , ns . getNamespaceURI ( ) ) ; } } |
public class SQLiteViewStore { /** * Returns the prefix of the key to use in the result row , at this groupLevel */
public static Object groupKey ( Object key , int groupLevel ) { } } | if ( groupLevel > 0 && ( key instanceof List ) && ( ( ( List < Object > ) key ) . size ( ) > groupLevel ) ) { return ( ( List < Object > ) key ) . subList ( 0 , groupLevel ) ; } else { return key ; } |
public class URIUtils { /** * Append scheme , host and port URI prefix , handling IPv6 address encoding and default ports
* @ param url StringBuilder to append to
* @ param scheme the URI scheme
* @ param server the URI server
* @ param port the URI port */
public static void appendSchemeHostPort ( StringBuilder url , String scheme , String server , int port ) { } } | url . append ( scheme ) . append ( "://" ) . append ( HostPort . normalizeHost ( server ) ) ; if ( port > 0 ) { switch ( scheme ) { case "http" : if ( port != 80 ) url . append ( ':' ) . append ( port ) ; break ; case "https" : if ( port != 443 ) url . append ( ':' ) . append ( port ) ; break ; default : url . append ( ':' ) . append ( port ) ; } } |
public class CSLUtils { /** * Reads a byte array from a stream . Closes the stream after reading .
* @ param is the stream
* @ return the byte array
* @ throws IOException if the stream contents could not be read */
public static byte [ ] readStream ( InputStream is ) throws IOException { } } | try { ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; byte [ ] buf = new byte [ 1024 * 10 ] ; int read ; while ( ( read = is . read ( buf ) ) >= 0 ) { baos . write ( buf , 0 , read ) ; } return baos . toByteArray ( ) ; } finally { is . close ( ) ; } |
public class PdfContentByte { /** * Changes the current color for filling paths ( device dependent colors ! ) .
* Sets the color space to < B > DeviceCMYK < / B > ( or the < B > DefaultCMYK < / B > color space ) ,
* and sets the color to use for filling paths . < / P >
* This method is described in the ' Portable Document Format Reference Manual version 1.3'
* section 8.5.2.1 ( page 331 ) . < / P >
* Following the PDF manual , each operand must be a number between 0 ( no ink ) and
* 1 ( maximum ink ) . This method however accepts only integers between 0x00 and 0xFF . < / P >
* @ param cyan the intensity of cyan
* @ param magenta the intensity of magenta
* @ param yellow the intensity of yellow
* @ param black the intensity of black */
public void setCMYKColorFill ( int cyan , int magenta , int yellow , int black ) { } } | content . append ( ( float ) ( cyan & 0xFF ) / 0xFF ) ; content . append ( ' ' ) ; content . append ( ( float ) ( magenta & 0xFF ) / 0xFF ) ; content . append ( ' ' ) ; content . append ( ( float ) ( yellow & 0xFF ) / 0xFF ) ; content . append ( ' ' ) ; content . append ( ( float ) ( black & 0xFF ) / 0xFF ) ; content . append ( " k" ) . append_i ( separator ) ; |
public class ReasonFlags { /** * Get the attribute value . */
public Object get ( String name ) throws IOException { } } | return Boolean . valueOf ( isSet ( name2Index ( name ) ) ) ; |
public class Fraction { /** * < p > Gets a fraction that is the inverse ( 1 / fraction ) of this one . < / p >
* < p > The returned fraction is not reduced . < / p >
* @ return a new fraction instance with the numerator and denominator
* inverted .
* @ throws ArithmeticException if the fraction represents zero . */
public Fraction invert ( ) { } } | if ( numerator == 0 ) { throw new ArithmeticException ( "Unable to invert zero." ) ; } if ( numerator == Integer . MIN_VALUE ) { throw new ArithmeticException ( "overflow: can't negate numerator" ) ; } if ( numerator < 0 ) { return new Fraction ( - denominator , - numerator ) ; } return new Fraction ( denominator , numerator ) ; |
public class MMCImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) @ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case AfplibPackage . MMC__MM_CID : setMMCid ( ( Integer ) newValue ) ; return ; case AfplibPackage . MMC__PARAMETER1 : setPARAMETER1 ( ( Integer ) newValue ) ; return ; case AfplibPackage . MMC__RG : getRg ( ) . clear ( ) ; getRg ( ) . addAll ( ( Collection < ? extends MMCRG > ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ; |
public class EscapeUtils { /** * Escapes quotes and backslashes in a string . Double quotes are replaced
* with a backslash followed by a double quote , and backslashes are replaced
* with a double backslash .
* @ param s
* The string to be escaped
* @ return The escaped string . */
@ Nonnull public static String escapeString ( @ Nonnull String s ) { } } | // We replace double quotes with a back slash followed
// by a double quote . We replace backslashes with a double
// backslash
if ( s . indexOf ( '\"' ) == - 1 && s . indexOf ( '\\' ) == - 1 ) { return s ; } StringBuilder sb = new StringBuilder ( s . length ( ) + 20 ) ; for ( int i = 0 ; i < s . length ( ) ; i ++ ) { char ch = s . charAt ( i ) ; if ( ch == '\\' ) { sb . append ( "\\\\" ) ; } else if ( ch == '\"' ) { sb . append ( "\\\"" ) ; } else { sb . append ( ch ) ; } } return verifyNotNull ( sb . toString ( ) ) ; |
public class EditTextValidator { /** * name = " android : textColorPrimaryInverse " > @ android : color / primary _ text _ light < / item > */
public static String getText ( EditText editText , int minLen , int errMsgResId , Object ... errMsgFormatArgs ) throws ValidationException { } } | return getText ( editText , minLen , editText . getContext ( ) . getString ( errMsgResId , errMsgFormatArgs ) ) ; |
public class EventEmitter { /** * Emits a < b > LOCAL < / b > event to < b > ALL < / b > listeners from ALL event groups ,
* who are listening this event . Sample code : < br >
* < br >
* Tree params = new Tree ( ) ; < br >
* params . put ( " a " , true ) ; < br >
* params . putList ( " b " ) . add ( 1 ) . add ( 2 ) . add ( 3 ) ; < br >
* ctx . broadcastLocal ( " user . modified " , params ) ;
* @ param name
* name of event ( eg . " user . created " )
* @ param payload
* { @ link Tree } structure ( payload of the event ) */
public void broadcastLocal ( String name , Tree payload ) { } } | eventbus . broadcast ( name , payload , null , true ) ; |
public class RelationSumPermutationFunction { /** * { @ inheritDoc } */
public T permute ( T vector , DependencyPath path ) { } } | int bestRelationScore = 0 ; for ( DependencyRelation link : path ) vector = permFunc . permute ( vector , getRelationScore ( link . relation ( ) ) ) ; return vector ; } private static int getRelationScore ( String relation ) { if ( relation . length ( ) == 0 ) return 0 ; if ( relation . equals ( "SBJ" ) ) return 6 ; if ( relation . equals ( "OBJ" ) ) return 5 ; if ( relation . equals ( "NMOD" ) ) return 4 ; if ( relation . equals ( "VMOD" ) ) return 3 ; if ( relation . equals ( "ADV" ) ) return 2 ; return 1 ; } |
public class PrimitiveWrapperPersistenceDelegate { /** * Two wrapper objects are regarded mutatable if they are equal . */
@ Override protected boolean mutatesTo ( Object o1 , Object o2 ) { } } | if ( null == o2 ) { return false ; } return o1 . equals ( o2 ) ; |
public class CCTask { /** * Adds a target definition or reference ( Non - functional prototype ) .
* @ param target
* target
* @ throws NullPointerException
* if compiler is null */
public void addConfiguredTarget ( final TargetDef target ) { } } | if ( target == null ) { throw new NullPointerException ( "target" ) ; } target . setProject ( getProject ( ) ) ; this . targetPlatforms . addElement ( target ) ; |
public class IconicsDrawable { /** * Set rounded corner from px
* @ return The current IconicsDrawable for chaining . */
@ NonNull public IconicsDrawable roundedCornersRyPx ( @ Dimension ( unit = PX ) int sizePx ) { } } | mRoundedCornerRy = sizePx ; invalidateSelf ( ) ; return this ; |
public class ShutdownSystem { /** * Start the server shutdown */
public static void shutdownActive ( ShutdownModeAmp mode , ExitCode exitCode , String msg , Result < String > result ) { } } | ShutdownSystem shutdown = _activeService . get ( ) ; if ( shutdown != null ) { shutdown . shutdown ( mode , exitCode , msg , result ) ; return ; } shutdown = getCurrent ( ) ; if ( shutdown != null ) { shutdown . shutdown ( mode , exitCode , msg , result ) ; return ; } msg = ShutdownSystem . class . getSimpleName ( ) + " is not active:\n " + msg ; log . warning ( msg ) ; System . out . println ( msg ) ; if ( result != null ) { result . ok ( msg ) ; } |
public class ConsumerMonitorRegistrar { /** * Method addCallbackToConnectionIndex
* Adds a new callback to the callback index .
* @ param topicExpression
* @ param isWildcarded
* @ param callback
* @ return */
public void addCallbackToConnectionIndex ( ConnectionImpl connection , String topicExpression , boolean isWildcarded , ConsumerSetChangeCallback callback ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "addCallbackToConnectionIndex" , new Object [ ] { connection , topicExpression , new Boolean ( isWildcarded ) , callback } ) ; // Map of callbacks - to - expressions for a connection
Map connMap = null ; if ( _callbackIndex . containsKey ( connection ) ) { // Already have registered callbacks for this connection
connMap = ( HashMap ) _callbackIndex . get ( connection ) ; } else { // No registered callbacks for this connection
connMap = new HashMap ( ) ; _callbackIndex . put ( connection , connMap ) ; } // Add the new callback to the index , so we can find it when we deregister
TopicRecord tRecord = new TopicRecord ( topicExpression , isWildcarded ) ; connMap . put ( callback , tRecord ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "addCallbackToConnectionIndex" ) ; |
public class RegxFunctionUtil { /** * 进行正则判断
* @ param str 判断的字符串
* @ param regex 正则表达式
* @ return 是否存在符合正则的字符串 */
public boolean match ( String str , String regex ) { } } | if ( str == null || regex == null ) return false ; if ( regex . trim ( ) . isEmpty ( ) ) return true ; return Pattern . compile ( regex . trim ( ) ) . matcher ( str ) . find ( ) ; |
public class AdminToolLog4j2Util { /** * returns all logger names including custom loggers
* @ since 1.1.1
* @ return */
public Collection < String > getAllLoggerNames ( ) { } } | Set < String > loggerNames = new TreeSet < > ( ) ; for ( Logger logger : getParentLoggers ( ) ) { loggerNames . add ( logger . getName ( ) ) ; } for ( Logger logger : getLoggers ( ) ) { loggerNames . add ( logger . getName ( ) ) ; } if ( ! customLoggers . isEmpty ( ) ) { for ( Entry < LoggerConfig , String > entry : customLoggers . entrySet ( ) ) { loggerNames . add ( entry . getKey ( ) . getName ( ) ) ; } } if ( ! customParentLoggers . isEmpty ( ) ) { for ( Entry < LoggerConfig , String > entry : customParentLoggers . entrySet ( ) ) { loggerNames . add ( entry . getKey ( ) . getName ( ) ) ; } } return loggerNames ; |
public class SafeServiceLoader { /** * Parses a single line of a META - INF / services resources . If the line contains a class name , the
* name is added to the given list .
* @ param names list of class names
* @ param line line to be parsed */
private void parseLine ( List < String > names , String line ) { } } | int commentPos = line . indexOf ( '#' ) ; if ( commentPos >= 0 ) { line = line . substring ( 0 , commentPos ) ; } line = line . trim ( ) ; if ( ! line . isEmpty ( ) && ! names . contains ( line ) ) { names . add ( line ) ; } |
public class BugProperty { /** * ( non - Javadoc )
* @ see
* edu . umd . cs . findbugs . xml . XMLWriteable # writeXML ( edu . umd . cs . findbugs . xml
* . XMLOutput ) */
@ Override public void writeXML ( XMLOutput xmlOutput ) throws IOException { } } | xmlOutput . openCloseTag ( "Property" , new XMLAttributeList ( ) . addAttribute ( "name" , getName ( ) ) . addAttribute ( "value" , getValue ( ) ) ) ; |
public class Config { /** * Sets the policy to use for handling { @ link Connections # create ( ConnectionOptions , Config , ClassLoader )
* connection attempt } errors . Overrides the { @ link # withRetryPolicy ( RetryPolicy ) global retry
* policy } .
* @ see # withConnectRetryPolicy ( RetryPolicy )
* @ see # withRetryPolicy ( RetryPolicy ) */
public RetryPolicy getConnectRetryPolicy ( ) { } } | RetryPolicy result = connectRetryPolicy == null ? retryPolicy : connectRetryPolicy ; return result != null ? result : parent != null ? parent . getConnectRetryPolicy ( ) : null ; |
// NOTE(review): a missing file is expected (first run) and returns quietly; parse errors are
// logged and leave whatever records were read so far. The log messages contain the typo
// "recrod" (should be "record") — left untouched here since strings are runtime behavior.
public class ActivityChooserModel { /** * Command for reading the historical records from a file off the UI thread . */
private void readHistoricalDataImpl ( ) { } } | FileInputStream fis = null ; try { fis = mContext . openFileInput ( mHistoryFileName ) ; } catch ( FileNotFoundException fnfe ) { if ( DEBUG ) { Log . i ( LOG_TAG , "Could not open historical records file: " + mHistoryFileName ) ; } return ; } try { XmlPullParser parser = Xml . newPullParser ( ) ; parser . setInput ( fis , null ) ; int type = XmlPullParser . START_DOCUMENT ; while ( type != XmlPullParser . END_DOCUMENT && type != XmlPullParser . START_TAG ) { type = parser . next ( ) ; } if ( ! TAG_HISTORICAL_RECORDS . equals ( parser . getName ( ) ) ) { throw new XmlPullParserException ( "Share records file does not start with " + TAG_HISTORICAL_RECORDS + " tag." ) ; } List < HistoricalRecord > historicalRecords = mHistoricalRecords ; historicalRecords . clear ( ) ; while ( true ) { type = parser . next ( ) ; if ( type == XmlPullParser . END_DOCUMENT ) { break ; } if ( type == XmlPullParser . END_TAG || type == XmlPullParser . TEXT ) { continue ; } String nodeName = parser . getName ( ) ; if ( ! TAG_HISTORICAL_RECORD . equals ( nodeName ) ) { throw new XmlPullParserException ( "Share records file not well-formed." ) ; } String activity = parser . getAttributeValue ( null , ATTRIBUTE_ACTIVITY ) ; final long time = Long . parseLong ( parser . getAttributeValue ( null , ATTRIBUTE_TIME ) ) ; final float weight = Float . parseFloat ( parser . getAttributeValue ( null , ATTRIBUTE_WEIGHT ) ) ; HistoricalRecord readRecord = new HistoricalRecord ( activity , time , weight ) ; historicalRecords . add ( readRecord ) ; if ( DEBUG ) { Log . i ( LOG_TAG , "Read " + readRecord . toString ( ) ) ; } } if ( DEBUG ) { Log . i ( LOG_TAG , "Read " + historicalRecords . size ( ) + " historical records." ) ; } } catch ( XmlPullParserException xppe ) { Log . e ( LOG_TAG , "Error reading historical recrod file: " + mHistoryFileName , xppe ) ; } catch ( IOException ioe ) { Log . 
e ( LOG_TAG , "Error reading historical recrod file: " + mHistoryFileName , ioe ) ; } finally { if ( fis != null ) { try { fis . close ( ) ; } catch ( IOException ioe ) { /* ignore */
} } }
public class LocationDirector { /** * Called to test and set a time stamp that we use to determine if a pending moveTo request is
* stale . */
public boolean checkRepeatMove ( ) { } } | long now = System . currentTimeMillis ( ) ; if ( now - _lastRequestTime < STALE_REQUEST_DURATION ) { return true ; } else { _lastRequestTime = now ; return false ; } |
public class MyJustifiedTextView { /** * We want our text to be selectable , but we still want links to be clickable . */
@ Override public boolean onTouchEvent ( final @ NotNull MotionEvent event ) { } } | final Spannable text = ( Spannable ) getText ( ) ; if ( text != null ) { if ( event . getAction ( ) == MotionEvent . ACTION_DOWN ) { final Layout layout = getLayout ( ) ; if ( layout != null ) { // final int pos = getOffsetForPosition ( event . getX ( ) , event . getY ( ) ) ; / / API > = 14 only
final int line = getLineAtCoordinate ( layout , event . getY ( ) ) ; final int pos = getOffsetAtCoordinate ( layout , line , event . getX ( ) ) ; final ClickableSpan [ ] links = text . getSpans ( pos , pos , ClickableSpan . class ) ; if ( links != null && links . length > 0 ) { links [ 0 ] . onClick ( this ) ; return true ; } } } } return super . onTouchEvent ( event ) ; |
public class ApiOvhOrder { /** * Create order
* REST : POST / order / cdn / dedicated / { serviceName } / cacheRule / { duration }
* @ param cacheRule [ required ] cache rule upgrade option to 100 or 1000
* @ param serviceName [ required ] The internal name of your CDN offer
* @ param duration [ required ] Duration */
public OvhOrder cdn_dedicated_serviceName_cacheRule_duration_POST ( String serviceName , String duration , OvhOrderCacheRuleEnum cacheRule ) throws IOException { } } | String qPath = "/order/cdn/dedicated/{serviceName}/cacheRule/{duration}" ; StringBuilder sb = path ( qPath , serviceName , duration ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "cacheRule" , cacheRule ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhOrder . class ) ; |
public class GtkSourceViewerGenerator2 { /** * Generate the metadata section .
* @ param it the appendable */
protected void generateMetadata ( IXmlStyleAppendable it ) { } } | it . appendTagWithValue ( "property" , // $ NON - NLS - 1 $
Strings . concat ( ";" , getMimeTypes ( ) ) , // $ NON - NLS - 1 $
"name" , "mimetypes" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
final StringBuilder buffer = new StringBuilder ( ) ; for ( final String fileExtension : getLanguage ( ) . getFileExtensions ( ) ) { if ( buffer . length ( ) > 0 ) { buffer . append ( ";" ) ; // $ NON - NLS - 1 $
} buffer . append ( "*." ) . append ( fileExtension ) ; // $ NON - NLS - 1 $
} it . appendTagWithValue ( "property" , // $ NON - NLS - 1 $
buffer . toString ( ) , "name" , "globs" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
it . appendTagWithValue ( "property" , // $ NON - NLS - 1 $
"//" , // $ NON - NLS - 1 $
"name" , "line-comment-start" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
it . appendTagWithValue ( "property" , // $ NON - NLS - 1 $
"/*" , // $ NON - NLS - 1 $
"name" , "block-comment-start" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $
it . appendTagWithValue ( "property" , // $ NON - NLS - 1 $
"*/" , // $ NON - NLS - 1 $
"name" , "block-comment-end" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ |
public class OWLValueObject { /** * Builds an instance , from a given collection
* @ param model
* @ param col
* @ return
* @ throws NotYetImplementedException
* @ throws OWLTranslationException */
public static OWLValueObject buildFromCollection ( OWLModel model , Collection col ) throws NotYetImplementedException , OWLTranslationException { } } | if ( col . isEmpty ( ) ) { return null ; } return buildFromClasAndCollection ( model , OWLURIClass . from ( col . iterator ( ) . next ( ) ) , col ) ; |
public class RateThisApp { /** * Call this API when the launcher activity is launched . < br >
* It is better to call this API in onCreate ( ) of the launcher activity .
* @ param context Context */
public static void onCreate ( Context context ) { } } | SharedPreferences pref = context . getSharedPreferences ( PREF_NAME , Context . MODE_PRIVATE ) ; Editor editor = pref . edit ( ) ; // If it is the first launch , save the date in shared preference .
if ( pref . getLong ( KEY_INSTALL_DATE , 0 ) == 0L ) { storeInstallDate ( context , editor ) ; } // Increment launch times
int launchTimes = pref . getInt ( KEY_LAUNCH_TIMES , 0 ) ; launchTimes ++ ; editor . putInt ( KEY_LAUNCH_TIMES , launchTimes ) ; log ( "Launch times; " + launchTimes ) ; editor . apply ( ) ; mInstallDate = new Date ( pref . getLong ( KEY_INSTALL_DATE , 0 ) ) ; mLaunchTimes = pref . getInt ( KEY_LAUNCH_TIMES , 0 ) ; mOptOut = pref . getBoolean ( KEY_OPT_OUT , false ) ; mAskLaterDate = new Date ( pref . getLong ( KEY_ASK_LATER_DATE , 0 ) ) ; printStatus ( context ) ; |
public class GetBlockInfoPRequest { /** * < code > optional . alluxio . grpc . block . GetBlockInfoPOptions options = 2 ; < / code > */
public alluxio . grpc . GetBlockInfoPOptionsOrBuilder getOptionsOrBuilder ( ) { } } | return options_ == null ? alluxio . grpc . GetBlockInfoPOptions . getDefaultInstance ( ) : options_ ; |
public class BaseProfile { /** * generate ant build . xml
* @ param def Definition
* @ param outputDir output directory */
void generateMavenXml ( Definition def , String outputDir ) { } } | try { FileWriter pomfw = Utils . createFile ( "pom.xml" , outputDir ) ; PomXmlGen pxGen = new PomXmlGen ( ) ; pxGen . generate ( def , pomfw ) ; pomfw . close ( ) ; } catch ( IOException ioe ) { ioe . printStackTrace ( ) ; } |
public class DataSet { /** * Splits the dataset randomly into proportionally sized partitions .
* @ param rand the source of randomness for moving data around
* @ param splits any array , where the length is the number of datasets to
* create and the value of in each index is the fraction of samples that
* should be placed into that dataset . The sum of values must be less than
* or equal to 1.0
* @ return a list of new datasets */
public List < Type > randomSplit ( Random rand , double ... splits ) { } } | if ( splits . length < 1 ) throw new IllegalArgumentException ( "Input array of split fractions must be non-empty" ) ; IntList randOrder = new IntList ( size ( ) ) ; ListUtils . addRange ( randOrder , 0 , size ( ) , 1 ) ; Collections . shuffle ( randOrder , rand ) ; int [ ] stops = new int [ splits . length ] ; double sum = 0 ; for ( int i = 0 ; i < splits . length ; i ++ ) { sum += splits [ i ] ; if ( sum >= 1.001 /* some flex room for numeric issues */
) throw new IllegalArgumentException ( "Input splits sum is greater than 1 by index " + i + " reaching a sum of " + sum ) ; stops [ i ] = ( int ) Math . round ( sum * randOrder . size ( ) ) ; } List < Type > datasets = new ArrayList < > ( splits . length ) ; int prev = 0 ; for ( int i = 0 ; i < stops . length ; i ++ ) { datasets . add ( getSubset ( randOrder . subList ( prev , stops [ i ] ) ) ) ; prev = stops [ i ] ; } return datasets ; |
public class LeftTupleSinkNodeList { /** * Removes a < code > TupleSinkNode < / code > from the list . This works by attach the previous reference to the child reference .
* When the node to be removed is the first node it calls < code > removeFirst ( ) < / code > . When the node to be removed is the last node
* it calls < code > removeLast ( ) < / code > .
* @ param node
* The < code > TupleSinkNode < / code > to be removed . */
public void remove ( final LeftTupleSinkNode node ) { } } | if ( ( this . firstNode != node ) && ( this . lastNode != node ) ) { node . getPreviousLeftTupleSinkNode ( ) . setNextLeftTupleSinkNode ( node . getNextLeftTupleSinkNode ( ) ) ; node . getNextLeftTupleSinkNode ( ) . setPreviousLeftTupleSinkNode ( node . getPreviousLeftTupleSinkNode ( ) ) ; this . size -- ; node . setPreviousLeftTupleSinkNode ( null ) ; node . setNextLeftTupleSinkNode ( null ) ; } else { if ( this . firstNode == node ) { removeFirst ( ) ; } else if ( this . lastNode == node ) { removeLast ( ) ; } } |
public class TaskServiceImpl { /** * Search for tasks based in payload and other parameters . Use sort options as ASC or DESC e . g .
* sort = name or sort = workflowId . If order is not specified , defaults to ASC .
* @ param start Start index of pagination
* @ param size Number of entries
* @ param sort Sorting type ASC | DESC
* @ param freeText Text you want to search
* @ param query Query you want to search
* @ return instance of { @ link SearchResult } */
public SearchResult < TaskSummary > search ( int start , int size , String sort , String freeText , String query ) { } } | return executionService . getSearchTasks ( query , freeText , start , size , sort ) ; |
public class ResponseQueueReader { /** * { @ inheritDoc } */
@ Override public void onError ( Throwable t ) { } } | addEntry ( "adding an error ResultQueueEntry" , ResultQueueEntry . < FlatRow > fromThrowable ( t ) ) ; markerCounter . incrementAndGet ( ) ; |
public class OracleCleaningScipts { /** * R { @ inheritDoc } */
protected Collection < String > getConstraintsAddingScripts ( ) { } } | Collection < String > scripts = new ArrayList < String > ( ) ; String constraintName = "JCR_PK_" + valueTableSuffix + " PRIMARY KEY(ID)" ; scripts . add ( "ALTER TABLE " + valueTableName + " ADD CONSTRAINT " + constraintName ) ; constraintName = "JCR_PK_" + itemTableSuffix + " PRIMARY KEY(ID)" ; scripts . add ( "ALTER TABLE " + itemTableName + " ADD CONSTRAINT " + constraintName ) ; constraintName = "JCR_FK_" + valueTableSuffix + "_PROPERTY FOREIGN KEY(PROPERTY_ID) REFERENCES " + itemTableName + "(ID)" ; scripts . add ( "ALTER TABLE " + valueTableName + " ADD CONSTRAINT " + constraintName ) ; constraintName = "JCR_PK_" + refTableSuffix + " PRIMARY KEY(NODE_ID, PROPERTY_ID, ORDER_NUM)" ; scripts . add ( "ALTER TABLE " + refTableName + " ADD CONSTRAINT " + constraintName ) ; return scripts ; |
public class AmazonWorkDocsClient { /** * Adds the specified list of labels to the given resource ( a document or folder )
* @ param createLabelsRequest
* @ return Result of the CreateLabels operation returned by the service .
* @ throws EntityNotExistsException
* The resource does not exist .
* @ throws UnauthorizedOperationException
* The operation is not permitted .
* @ throws UnauthorizedResourceAccessException
* The caller does not have access to perform the action on the resource .
* @ throws FailedDependencyException
* The AWS Directory Service cannot reach an on - premises instance . Or a dependency under the control of the
* organization is failing , such as a connected Active Directory .
* @ throws ServiceUnavailableException
* One or more of the dependencies is unavailable .
* @ throws TooManyLabelsException
* The limit has been reached on the number of labels for the specified resource .
* @ sample AmazonWorkDocs . CreateLabels
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / workdocs - 2016-05-01 / CreateLabels " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateLabelsResult createLabels ( CreateLabelsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateLabels ( request ) ; |
public class CertificateCreator { /** * Utility method for generating a " standard " server certificate . Recognized by most
* browsers as valid for SSL / TLS . These certificates are generated de novo , not from
* a template , so they will not retain the structure of the original certificate and may
* not be suitable for applications that require Extended Validation / High Assurance SSL
* or other distinct extensions or EKU .
* @ param newPubKey
* @ param caCert
* @ param caPrivateKey
* @ param hostname
* @ return
* @ throws CertificateParsingException
* @ throws SignatureException
* @ throws InvalidKeyException
* @ throws CertificateExpiredException
* @ throws CertificateNotYetValidException
* @ throws CertificateException
* @ throws NoSuchAlgorithmException
* @ throws NoSuchProviderException */
@ SuppressWarnings ( { } } | "deprecation" , "unused" } ) public static X509Certificate generateStdSSLServerCertificate ( final PublicKey newPubKey , final X509Certificate caCert , final PrivateKey caPrivateKey , final String subject ) throws CertificateParsingException , SignatureException , InvalidKeyException , CertificateExpiredException , CertificateNotYetValidException , CertificateException , NoSuchAlgorithmException , NoSuchProviderException { X509V3CertificateGenerator v3CertGen = new X509V3CertificateGenerator ( ) ; v3CertGen . setSubjectDN ( new X500Principal ( subject ) ) ; v3CertGen . setSignatureAlgorithm ( CertificateCreator . SIGN_ALGO ) ; v3CertGen . setPublicKey ( newPubKey ) ; v3CertGen . setNotAfter ( new Date ( System . currentTimeMillis ( ) + 30L * 60 * 60 * 24 * 30 * 12 ) ) ; v3CertGen . setNotBefore ( new Date ( System . currentTimeMillis ( ) - 1000L * 60 * 60 * 24 * 30 * 12 ) ) ; v3CertGen . setIssuerDN ( caCert . getSubjectX500Principal ( ) ) ; // Firefox actually tracks serial numbers within a CA and refuses to validate if it sees duplicates
// This is not a secure serial number generator , ( duh ! ) but it ' s good enough for our purposes .
v3CertGen . setSerialNumber ( new BigInteger ( Long . toString ( System . currentTimeMillis ( ) ) ) ) ; v3CertGen . addExtension ( X509Extensions . BasicConstraints , true , new BasicConstraints ( false ) ) ; v3CertGen . addExtension ( X509Extensions . SubjectKeyIdentifier , false , new SubjectKeyIdentifierStructure ( newPubKey ) ) ; v3CertGen . addExtension ( X509Extensions . AuthorityKeyIdentifier , false , new AuthorityKeyIdentifierStructure ( caCert . getPublicKey ( ) ) ) ; // Firefox 2 disallows these extensions in an SSL server cert . IE7 doesn ' t care .
// v3CertGen . addExtension (
// X509Extensions . KeyUsage ,
// false ,
// new KeyUsage ( KeyUsage . dataEncipherment | KeyUsage . digitalSignature ) ) ;
DEREncodableVector typicalSSLServerExtendedKeyUsages = new DEREncodableVector ( ) ; typicalSSLServerExtendedKeyUsages . add ( new DERObjectIdentifier ( ExtendedKeyUsageConstants . serverAuth ) ) ; typicalSSLServerExtendedKeyUsages . add ( new DERObjectIdentifier ( ExtendedKeyUsageConstants . clientAuth ) ) ; typicalSSLServerExtendedKeyUsages . add ( new DERObjectIdentifier ( ExtendedKeyUsageConstants . netscapeServerGatedCrypto ) ) ; typicalSSLServerExtendedKeyUsages . add ( new DERObjectIdentifier ( ExtendedKeyUsageConstants . msServerGatedCrypto ) ) ; v3CertGen . addExtension ( X509Extensions . ExtendedKeyUsage , false , new DERSequence ( typicalSSLServerExtendedKeyUsages ) ) ; // Disabled by default . Left in comments in case this is desired .
// v3CertGen . addExtension (
// X509Extensions . AuthorityInfoAccess ,
// false ,
// new AuthorityInformationAccess ( new DERObjectIdentifier ( OID _ ID _ AD _ CAISSUERS ) ,
// new GeneralName ( GeneralName . uniformResourceIdentifier , " http : / / " + subject + " / aia " ) ) ) ;
// v3CertGen . addExtension (
// X509Extensions . CRLDistributionPoints ,
// false ,
// new CRLDistPoint ( new DistributionPoint [ ] { } ) ) ;
X509Certificate cert = v3CertGen . generate ( caPrivateKey , "BC" ) ; return cert ; |
public class HandlerList { /** * Insert a handler to the beginning of the stack .
* @ param restrictionClass restriction class
* @ param handler handler */
public void insertHandler ( Class < ? > restrictionClass , H handler ) { } } | // note that the handlers list is kept in a list that is traversed in
// backwards order .
handlers . add ( new Pair < Class < ? > , H > ( restrictionClass , handler ) ) ; |
public class DiffNode { /** * Retrieve a child that matches the given path element relative to this node .
* @ param selectors The path element of the child node to get .
* @ return The requested child node or < code > null < / code > . */
public DiffNode getChild ( final List < ElementSelector > selectors ) { } } | Assert . notEmpty ( selectors , "selectors" ) ; final ElementSelector selector = selectors . get ( 0 ) ; if ( selectors . size ( ) == 1 ) { if ( selector == RootElementSelector . getInstance ( ) ) { return isRootNode ( ) ? this : null ; } else { return getChild ( selector ) ; } } else if ( selectors . size ( ) > 1 ) { final DiffNode child ; if ( selector == RootElementSelector . getInstance ( ) ) { child = isRootNode ( ) ? this : null ; } else { child = getChild ( selector ) ; } if ( child != null ) { return child . getChild ( selectors . subList ( 1 , selectors . size ( ) ) ) ; } } return null ; |
public class StringGroovyMethods { /** * Expands all tabs into spaces . Assumes the CharSequence represents a single line of text .
* @ param self A line to expand
* @ param tabStop The number of spaces a tab represents
* @ return The expanded toString ( ) of this CharSequence
* @ see # expandLine ( String , int )
* @ since 1.8.2 */
public static String expandLine ( CharSequence self , int tabStop ) { } } | String s = self . toString ( ) ; int index ; while ( ( index = s . indexOf ( '\t' ) ) != - 1 ) { StringBuilder builder = new StringBuilder ( s ) ; int count = tabStop - index % tabStop ; builder . deleteCharAt ( index ) ; for ( int i = 0 ; i < count ; i ++ ) builder . insert ( index , " " ) ; s = builder . toString ( ) ; } return s ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcQuantitySet ( ) { } } | if ( ifcQuantitySetEClass == null ) { ifcQuantitySetEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 490 ) ; } return ifcQuantitySetEClass ; |
public class BottomSheet { /** * Adds the apps , which are able to handle a specific intent , as items to the bottom sheet . This
* causes all previously added items to be removed . When an item is clicked , the corresponding
* app is started .
* @ param activity
* The activity , the bottom sheet belongs to , as an instance of the class { @ link
* Activity } . The activity may not be null
* @ param intent
* The intent as an instance of the class { @ link Intent } . The intent may not be null */
public final void setIntent ( @ NonNull final Activity activity , @ NonNull final Intent intent ) { } } | Condition . INSTANCE . ensureNotNull ( activity , "The activity may not be null" ) ; Condition . INSTANCE . ensureNotNull ( intent , "The intent may not be null" ) ; removeAllItems ( ) ; PackageManager packageManager = activity . getPackageManager ( ) ; List < ResolveInfo > resolveInfos = packageManager . queryIntentActivities ( intent , 0 ) ; for ( int i = 0 ; i < resolveInfos . size ( ) ; i ++ ) { ResolveInfo resolveInfo = resolveInfos . get ( i ) ; addItem ( i , resolveInfo . loadLabel ( packageManager ) , resolveInfo . loadIcon ( packageManager ) ) ; } setOnItemClickListener ( createIntentClickListener ( activity , ( Intent ) intent . clone ( ) , resolveInfos ) ) ; |
public class sslcertlink { /** * Use this API to fetch all the sslcertlink resources that are configured on netscaler . */
public static sslcertlink [ ] get ( nitro_service service ) throws Exception { } } | sslcertlink obj = new sslcertlink ( ) ; sslcertlink [ ] response = ( sslcertlink [ ] ) obj . get_resources ( service ) ; return response ; |
public class DefaultClusterManager { /** * Locates the internal address of the node on which a deployment is deployed . */
private void findDeploymentAddress ( final String deploymentID , Handler < AsyncResult < String > > resultHandler ) { } } | context . execute ( new Action < String > ( ) { @ Override public String perform ( ) { synchronized ( deployments ) { JsonObject locatedInfo = null ; Collection < String > sdeploymentsInfo = deployments . get ( cluster ) ; for ( String sdeploymentInfo : sdeploymentsInfo ) { JsonObject deploymentInfo = new JsonObject ( sdeploymentInfo ) ; if ( deploymentInfo . getString ( "id" ) . equals ( deploymentID ) ) { locatedInfo = deploymentInfo ; break ; } } if ( locatedInfo != null ) { return locatedInfo . getString ( "address" ) ; } return null ; } } } , resultHandler ) ; |
public class CriteriaMapper { /** * Maps the given API Predicate object to a JPA criteria Predicate .
* @ param predicate the Predicate object
* @ return a JPA criteria Predicate */
public Predicate create ( org . cdlflex . fruit . Predicate predicate ) { } } | Path < ? > attribute = resolvePath ( predicate . getKey ( ) ) ; Object value = predicate . getValue ( ) ; Predicate jpaPredicate = create ( predicate . getOp ( ) , attribute , value ) ; return ( predicate . isNot ( ) ) ? jpaPredicate . not ( ) : jpaPredicate ; |
public class Results { /** * Creates a new result with the status { @ literal 200 - OK } with the content loaded from the
* given byte array . The result is sent as chunked .
* @ param bytes the byte array , must not be { @ code null }
* @ return a new configured result */
public static Result ok ( byte [ ] bytes ) { } } | return status ( Result . OK ) . render ( new RenderableByteArray ( bytes , true ) ) ; |
public class CmsWebdavServlet { /** * Propfind helper method . < p >
* @ param req the servlet request
* @ param elem the parent element where to add the generated subelements
* @ param item the current item where to parse the properties
* @ param type the propfind type
* @ param propertiesVector if the propfind type is find properties by
* name , then this Vector contains those properties */
private void parseProperties ( HttpServletRequest req , Element elem , I_CmsRepositoryItem item , int type , List < String > propertiesVector ) { } } | String path = item . getName ( ) ; Element responseElem = addElement ( elem , TAG_RESPONSE ) ; String status = "HTTP/1.1 " + CmsWebdavStatus . SC_OK + " " + CmsWebdavStatus . getStatusText ( CmsWebdavStatus . SC_OK ) ; // Generating href element
Element hrefElem = addElement ( responseElem , TAG_HREF ) ; String href = req . getContextPath ( ) + req . getServletPath ( ) ; if ( ( href . endsWith ( "/" ) ) && ( path . startsWith ( "/" ) ) ) { href += path . substring ( 1 ) ; } else { href += path ; } try { hrefElem . addText ( rewriteUrl ( href ) ) ; } catch ( UnsupportedEncodingException ex ) { return ; } String resourceName = path ; Element propstatElem = addElement ( responseElem , TAG_PROPSTAT ) ; Element propElem = addElement ( propstatElem , TAG_PROP ) ; switch ( type ) { case FIND_ALL_PROP : addElement ( propElem , TAG_CREATIONDATE ) . addText ( ISO8601_FORMAT . format ( new Date ( item . getCreationDate ( ) ) ) ) ; addElement ( propElem , TAG_DISPLAYNAME ) . addCDATA ( resourceName ) ; // properties only for files ( no collections )
if ( ! item . isCollection ( ) ) { addElement ( propElem , TAG_LASTMODIFIED ) . addText ( HTTP_DATE_FORMAT . format ( new Date ( item . getLastModifiedDate ( ) ) ) ) ; addElement ( propElem , TAG_CONTENTLENGTH ) . addText ( String . valueOf ( item . getContentLength ( ) ) ) ; String contentType = getServletContext ( ) . getMimeType ( item . getName ( ) ) ; if ( contentType != null ) { addElement ( propElem , TAG_CONTENTTYPE ) . addText ( contentType ) ; } addElement ( propElem , TAG_ETAG ) . addText ( getETag ( item ) ) ; addElement ( propElem , TAG_RESOURCETYPE ) ; } else { addElement ( addElement ( propElem , TAG_RESOURCETYPE ) , TAG_COLLECTION ) ; } addElement ( propElem , TAG_SOURCE ) . addText ( "" ) ; Element suppLockElem = addElement ( propElem , TAG_SUPPORTEDLOCK ) ; Element lockEntryElem = addElement ( suppLockElem , TAG_LOCKENTRY ) ; addElement ( addElement ( lockEntryElem , TAG_LOCKSCOPE ) , CmsRepositoryLockInfo . SCOPE_EXCLUSIVE ) ; addElement ( addElement ( lockEntryElem , TAG_LOCKTYPE ) , CmsRepositoryLockInfo . TYPE_WRITE ) ; lockEntryElem = addElement ( suppLockElem , TAG_LOCKENTRY ) ; addElement ( addElement ( lockEntryElem , TAG_LOCKSCOPE ) , CmsRepositoryLockInfo . SCOPE_SHARED ) ; addElement ( addElement ( lockEntryElem , TAG_LOCKTYPE ) , CmsRepositoryLockInfo . TYPE_WRITE ) ; generateLockDiscovery ( path , propElem , req ) ; addElement ( propstatElem , TAG_STATUS ) . addText ( status ) ; break ; case FIND_PROPERTY_NAMES : addElement ( propElem , TAG_CREATIONDATE ) ; addElement ( propElem , TAG_DISPLAYNAME ) ; if ( ! item . isCollection ( ) ) { addElement ( propElem , TAG_CONTENTLANGUAGE ) ; addElement ( propElem , TAG_CONTENTLENGTH ) ; addElement ( propElem , TAG_CONTENTTYPE ) ; addElement ( propElem , TAG_ETAG ) ; } addElement ( propElem , TAG_LASTMODIFIED ) ; addElement ( propElem , TAG_RESOURCETYPE ) ; addElement ( propElem , TAG_SOURCE ) ; addElement ( propElem , TAG_LOCKDISCOVERY ) ; addElement ( propstatElem , TAG_STATUS ) . 
addText ( status ) ; break ; case FIND_BY_PROPERTY : List < String > propertiesNotFound = new Vector < String > ( ) ; // Parse the list of properties
Iterator < String > iter = propertiesVector . iterator ( ) ; while ( iter . hasNext ( ) ) { String property = iter . next ( ) ; if ( property . equals ( TAG_CREATIONDATE ) ) { addElement ( propElem , TAG_CREATIONDATE ) . addText ( ISO8601_FORMAT . format ( new Date ( item . getCreationDate ( ) ) ) ) ; } else if ( property . equals ( TAG_DISPLAYNAME ) ) { addElement ( propElem , TAG_DISPLAYNAME ) . addCDATA ( resourceName ) ; } else if ( property . equals ( TAG_CONTENTLANGUAGE ) ) { if ( item . isCollection ( ) ) { propertiesNotFound . add ( property ) ; } else { addElement ( propElem , TAG_CONTENTLANGUAGE ) ; } } else if ( property . equals ( TAG_CONTENTLENGTH ) ) { if ( item . isCollection ( ) ) { propertiesNotFound . add ( property ) ; } else { addElement ( propElem , TAG_CONTENTLENGTH ) . addText ( ( String . valueOf ( item . getContentLength ( ) ) ) ) ; } } else if ( property . equals ( TAG_CONTENTTYPE ) ) { if ( item . isCollection ( ) ) { propertiesNotFound . add ( property ) ; } else { String contentType = item . getMimeType ( ) ; if ( contentType == null ) { contentType = getServletContext ( ) . getMimeType ( item . getName ( ) ) ; } if ( contentType != null ) { addElement ( propElem , TAG_CONTENTTYPE ) . addText ( contentType ) ; } } } else if ( property . equals ( TAG_ETAG ) ) { if ( item . isCollection ( ) ) { propertiesNotFound . add ( property ) ; } else { addElement ( propElem , TAG_ETAG ) . addText ( getETag ( item ) ) ; } } else if ( property . equals ( TAG_LASTMODIFIED ) ) { addElement ( propElem , TAG_LASTMODIFIED ) . addText ( HTTP_DATE_FORMAT . format ( new Date ( item . getLastModifiedDate ( ) ) ) ) ; } else if ( property . equals ( TAG_RESOURCETYPE ) ) { if ( item . isCollection ( ) ) { addElement ( addElement ( propElem , TAG_RESOURCETYPE ) , TAG_COLLECTION ) ; } else { addElement ( propElem , TAG_RESOURCETYPE ) ; } } else if ( property . equals ( TAG_SOURCE ) ) { addElement ( propElem , TAG_SOURCE ) . addText ( "" ) ; } else if ( property . 
equals ( TAG_SUPPORTEDLOCK ) ) { suppLockElem = addElement ( propElem , TAG_SUPPORTEDLOCK ) ; lockEntryElem = addElement ( suppLockElem , TAG_LOCKENTRY ) ; addElement ( addElement ( lockEntryElem , TAG_LOCKSCOPE ) , CmsRepositoryLockInfo . SCOPE_EXCLUSIVE ) ; addElement ( addElement ( lockEntryElem , TAG_LOCKTYPE ) , CmsRepositoryLockInfo . TYPE_WRITE ) ; lockEntryElem = addElement ( suppLockElem , TAG_LOCKENTRY ) ; addElement ( addElement ( lockEntryElem , TAG_LOCKSCOPE ) , CmsRepositoryLockInfo . SCOPE_SHARED ) ; addElement ( addElement ( lockEntryElem , TAG_LOCKTYPE ) , CmsRepositoryLockInfo . TYPE_WRITE ) ; } else if ( property . equals ( TAG_LOCKDISCOVERY ) ) { if ( ! generateLockDiscovery ( path , propElem , req ) ) { addElement ( propElem , TAG_LOCKDISCOVERY ) ; } } else { propertiesNotFound . add ( property ) ; } } addElement ( propstatElem , TAG_STATUS ) . addText ( status ) ; if ( propertiesNotFound . size ( ) > 0 ) { status = "HTTP/1.1 " + CmsWebdavStatus . SC_NOT_FOUND + " " + CmsWebdavStatus . getStatusText ( CmsWebdavStatus . SC_NOT_FOUND ) ; propstatElem = addElement ( responseElem , TAG_PROPSTAT ) ; propElem = addElement ( propstatElem , TAG_PROP ) ; Iterator < String > notFoundIter = propertiesNotFound . iterator ( ) ; while ( notFoundIter . hasNext ( ) ) { addElement ( propElem , notFoundIter . next ( ) ) ; } addElement ( propstatElem , TAG_STATUS ) . addText ( status ) ; } break ; default : if ( LOG . isErrorEnabled ( ) ) { LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_INVALID_PROPFIND_TYPE_0 ) ) ; } break ; } |
public class RetryTemplateBuilder { /** * Finish configuration and build resulting { @ link RetryTemplate } . For default
* behaviour and concurrency note see class - level doc of { @ link RetryTemplateBuilder } .
* The { @ code retryPolicy } of the returned { @ link RetryTemplate } is always an instance
* of { @ link CompositeRetryPolicy } , that consists of one base policy , and of
* { @ link BinaryExceptionClassifierRetryPolicy } . The motivation is : whatever base
* policy we use , exception classification is extremely recommended .
* @ return new instance of { @ link RetryTemplate } */
public RetryTemplate build ( ) { } } | RetryTemplate retryTemplate = new RetryTemplate ( ) ; // Exception classifier
BinaryExceptionClassifier exceptionClassifier = this . classifierBuilder != null ? this . classifierBuilder . build ( ) : BinaryExceptionClassifier . defaultClassifier ( ) ; // Retry policy
if ( this . baseRetryPolicy == null ) { this . baseRetryPolicy = new MaxAttemptsRetryPolicy ( ) ; } CompositeRetryPolicy finalPolicy = new CompositeRetryPolicy ( ) ; finalPolicy . setPolicies ( new RetryPolicy [ ] { this . baseRetryPolicy , new BinaryExceptionClassifierRetryPolicy ( exceptionClassifier ) } ) ; retryTemplate . setRetryPolicy ( finalPolicy ) ; // Backoff policy
if ( this . backOffPolicy == null ) { this . backOffPolicy = new NoBackOffPolicy ( ) ; } retryTemplate . setBackOffPolicy ( this . backOffPolicy ) ; // Listeners
if ( this . listeners != null ) { retryTemplate . setListeners ( this . listeners . toArray ( new RetryListener [ 0 ] ) ) ; } return retryTemplate ; |
public class DeepLearningTask2 { /** * Do the local computation : Perform one DeepLearningTask ( with run _ local = true ) iteration .
* Pass over all the data ( will be replicated in dfork ( ) here ) , and use _ sync _ fraction random rows .
* This calls DeepLearningTask ' s reduce ( ) between worker threads that update the same local model _ info via Hogwild !
* Once the computation is done , reduce ( ) will be called */
@ Override public void setupLocal ( ) { } } | super . setupLocal ( ) ; _res = new DeepLearningTask ( _jobKey , _sharedmodel , _sync_fraction , _iteration , this ) ; addToPendingCount ( 1 ) ; _res . dfork ( null , _fr , true /* run _ local */
) ; |
public class RouteFilterRulesInner { /** * Updates a route in the specified route filter .
* @ param resourceGroupName The name of the resource group .
* @ param routeFilterName The name of the route filter .
* @ param ruleName The name of the route filter rule .
* @ param routeFilterRuleParameters Parameters supplied to the update route filter rule operation .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < RouteFilterRuleInner > updateAsync ( String resourceGroupName , String routeFilterName , String ruleName , PatchRouteFilterRule routeFilterRuleParameters , final ServiceCallback < RouteFilterRuleInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( updateWithServiceResponseAsync ( resourceGroupName , routeFilterName , ruleName , routeFilterRuleParameters ) , serviceCallback ) ; |
public class ThymeleafRenderingProvider { protected ITemplateResolver createTemplateResolver ( ) { } } | final ServletContextTemplateResolver resolver = newServletContextTemplateResolver ( ) ; resolver . setPrefix ( getHtmlViewPrefix ( ) ) ; resolver . setTemplateMode ( getTemplateMode ( ) ) ; resolver . setCharacterEncoding ( getEncoding ( ) ) ; resolver . setCacheable ( isCacheable ( ) ) ; return resolver ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcSIUnit ( ) { } } | if ( ifcSIUnitEClass == null ) { ifcSIUnitEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 583 ) ; } return ifcSIUnitEClass ; |
public class CLIQUESubspace { /** * Adds the specified dense unit to this subspace .
* @ param unit the unit to be added . */
public void addDenseUnit ( CLIQUEUnit unit ) { } } | int numdim = unit . dimensionality ( ) ; for ( int i = 0 ; i < numdim ; i ++ ) { BitsUtil . setI ( getDimensions ( ) , unit . getDimension ( i ) ) ; } denseUnits . add ( unit ) ; coverage += unit . numberOfFeatureVectors ( ) ; |
public class CmsContextMenu { /** * Hides this menu and all its parent menus . < p > */
public void hideAll ( ) { } } | CmsContextMenu currentMenu = this ; int i = 0 ; while ( ( currentMenu != null ) && ( i < 10 ) ) { currentMenu . hide ( ) ; currentMenu = currentMenu . getParentMenu ( ) ; i += 1 ; } |
public class AdaptableModuleFactoryImpl { /** * { @ inheritDoc } */
@ Override public Container getContainer ( File overlayDir , File cacheDirForOverlayContent , ArtifactContainer container ) { } } | com . ibm . wsspi . artifact . overlay . OverlayContainer o = getOverlayContainerFactory ( ) . createOverlay ( OverlayContainer . class , container ) ; if ( o != null ) { o . setOverlayDirectory ( cacheDirForOverlayContent , overlayDir ) ; AdaptableContainerImpl a = new AdaptableContainerImpl ( o , this ) ; return a ; } return null ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.