signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class KeystoreConfigurationFactory { /** * Set the reference to the location manager .
* Dynamic service : always use the most recent .
* @ param locSvc Location service */
@ Reference ( service = WsLocationAdmin . class , policy = ReferencePolicy . DYNAMIC , policyOption = ReferencePolicyOption . GREEDY ) protected void setLocMgr ( ServiceReference < WsLocationAdmin > locSvc ) { } } | this . locSvc . setReference ( locSvc ) ; |
public class SectionLoader { /** * Loads all bytes and information of the export section . The file on disk
* is read to fetch the information .
* @ return the export section
* @ throws IOException
* if unable to read the file
* @ throws IllegalStateException
* if unable to load section */
public ExportSection loadExportSection ( ) throws IOException { } } | Optional < ExportSection > edata = maybeLoadExportSection ( ) ; return ( ExportSection ) getOrThrow ( edata , "unable to load export section" ) ; |
public class RealSubscriptionManager { /** * Unsubscribe from all active subscriptions , and disconnect the web socket . It will not be
* possible to add new subscriptions while the { @ link SubscriptionManager } is stopping
* because we check the state in { @ link # doSubscribe ( Subscription , Callback ) } . We pass true to
* { @ link # disconnect ( boolean ) } because we want to disconnect even if , somehow , a new subscription
* is added while or after we are doing the { @ link # doUnsubscribe ( Subscription ) } loop . */
@ Override public void stop ( ) { } } | synchronized ( this ) { setStateAndNotify ( State . STOPPING ) ; ArrayList < SubscriptionRecord > values = new ArrayList < > ( subscriptions . values ( ) ) ; for ( SubscriptionRecord subscription : values ) { doUnsubscribe ( subscription . subscription ) ; } disconnect ( true ) ; } |
public class OpenSslX509KeyManagerFactory { /** * Create a new initialized { @ link OpenSslX509KeyManagerFactory } which loads its { @ link PrivateKey } directly from
* an { @ code OpenSSL engine } via the
* < a href = " https : / / www . openssl . org / docs / man1.1.0 / crypto / ENGINE _ load _ private _ key . html " > ENGINE _ load _ private _ key < / a >
* function . */
public static OpenSslX509KeyManagerFactory newEngineBased ( X509Certificate [ ] certificateChain , String password ) throws CertificateException , IOException , KeyStoreException , NoSuchAlgorithmException , UnrecoverableKeyException { } } | KeyStore store = new OpenSslKeyStore ( certificateChain . clone ( ) , false ) ; store . load ( null , null ) ; OpenSslX509KeyManagerFactory factory = new OpenSslX509KeyManagerFactory ( ) ; factory . init ( store , password == null ? null : password . toCharArray ( ) ) ; return factory ; |
public class TemplateException { /** * { @ inheritDoc } */
@ Override public List < String > getSource ( ) { } } | InputStream in = null ; try { if ( sourceUrl != null ) { in = sourceUrl . openStream ( ) ; return IOUtils . readLines ( in ) ; } } catch ( IOException e ) { throw new AmebaException ( e ) ; } finally { IOUtils . closeQuietly ( in ) ; } return Lists . newArrayList ( ) ; |
public class AffineTransformation { /** * Add a reflection along the given axis .
* @ param axis Axis number to do the reflection at . */
public void addAxisReflection ( int axis ) { } } | assert ( 0 < axis && axis <= dim ) ; // reset inverse transformation - needs recomputation .
inv = null ; // Formal :
// Matrix homTrans = Matrix . unitMatrix ( dim + 1 ) ;
// homTrans [ axis - 1 ] [ axis - 1 ] = - 1;
// trans = homTrans . times ( trans ) ;
// Faster :
for ( int i = 0 ; i <= dim ; i ++ ) { trans [ axis - 1 ] [ i ] = - trans [ axis - 1 ] [ i ] ; } |
public class FastTrig { /** * Get the sine of an angle
* @ param radians The angle
* @ return The sine of the angle */
public static double sin ( double radians ) { } } | radians = reduceSinAngle ( radians ) ; // limits angle to between - PI / 2 and + PI / 2
if ( Math . abs ( radians ) <= Math . PI / 4 ) { return Math . sin ( radians ) ; } else { return Math . cos ( Math . PI / 2 - radians ) ; } |
public class NutchResourceIndex { /** * / * ( non - Javadoc )
* @ see org . archive . wayback . ResourceIndex # query ( org . archive . wayback . core . WaybackRequest ) */
public SearchResults query ( WaybackRequest wbRequest ) throws ResourceIndexNotAvailableException , ResourceNotInArchiveException , BadQueryException , AccessControlException { } } | // Get the URL for the request :
String requestUrl = getRequestUrl ( wbRequest ) ; Document document = null ; try { // HTTP Request + parse
LOGGER . info ( "Requesting OpenSearch: " + requestUrl ) ; document = getHttpDocument ( requestUrl ) ; } catch ( IOException e ) { // TODO : better error for user :
e . printStackTrace ( ) ; throw new ResourceIndexNotAvailableException ( e . getMessage ( ) ) ; } catch ( SAXException e ) { e . printStackTrace ( ) ; throw new ResourceIndexNotAvailableException ( "Unexpected SAX: " + e . getMessage ( ) ) ; } CaptureSearchResults results ; if ( wbRequest . isReplayRequest ( ) || wbRequest . isCaptureQueryRequest ( ) ) { results = new CaptureSearchResults ( ) ; } else { // TODO : this is wrong , but needs exploration into what NutchWax
// can actually do .
throw new BadQueryException ( "Unable to perform path " + "prefix requests with this index type" ) ; } NodeList channel = getSearchChannel ( document ) ; NodeList nodes = getSearchItems ( document ) ; if ( channel == null || channel . getLength ( ) != 1 ) { // TODO : better error for user :
throw new ResourceNotInArchiveException ( "No results for " + requestUrl ) ; } if ( nodes == null ) { // TODO : better error for user :
throw new ResourceNotInArchiveException ( "No results for " + requestUrl ) ; } for ( int i = 0 ; i < nodes . getLength ( ) ; i ++ ) { Element e = ( Element ) nodes . item ( i ) ; List < CaptureSearchResult > resultsList = itemToSearchResults ( e ) ; if ( resultsList != null ) { for ( CaptureSearchResult result : resultsList ) { results . addSearchResult ( result ) ; } } } Element channelElement = ( Element ) channel . item ( 0 ) ; results . putFilter ( SearchResults . RESULTS_FIRST_RETURNED , getNodeContent ( channelElement , NUTCH_FIRST_RESULT ) ) ; results . putFilter ( SearchResults . RESULTS_NUM_RESULTS , getNodeContent ( channelElement , NUTCH_NUM_RESULTS ) ) ; results . putFilter ( SearchResults . RESULTS_NUM_RETURNED , getNodeContent ( channelElement , NUTCH_NUM_RETURNED ) ) ; results . putFilter ( SearchResults . RESULTS_REQUESTED , String . valueOf ( wbRequest . getResultsPerPage ( ) ) ) ; results . putFilter ( WaybackRequest . REQUEST_START_DATE , Timestamp . earliestTimestamp ( ) . getDateStr ( ) ) ; results . putFilter ( WaybackRequest . REQUEST_END_DATE , Timestamp . latestTimestamp ( ) . getDateStr ( ) ) ; return results ; |
public class GenericsUtils { /** * Generics visibility ( from inside context class ) :
* < ul >
* < li > Generics declared on class < / li >
* < li > Generics declared on outer class ( if current is inner ) < / li >
* < li > Constructor generics ( if inside constructor ) < / li >
* < li > Method generics ( if inside method ) < / li >
* < / ul > .
* @ param type type to check
* @ param context current context class
* @ param contextScope current context scope ( class , method , constructor )
* @ param contextSource context source object ( required for method and constructor scopes )
* @ return first variable , containing generic not visible from current class or null if no violations */
public static TypeVariable findIncompatibleVariable ( final Type type , final Class < ? > context , final GenericDeclarationScope contextScope , final GenericDeclaration contextSource ) { } } | TypeVariable res = null ; for ( TypeVariable var : findVariables ( type ) ) { final Class < ? > target = getDeclarationClass ( var ) ; // declaration class must be context or it ' s outer class ( even if outer = null equals will be correct )
if ( ! target . equals ( context ) && ! target . equals ( TypeUtils . getOuter ( context ) ) ) { res = var ; break ; } // e . g . correct class , but method generic when current context represents class
if ( ! contextScope . isCompatible ( GenericDeclarationScope . from ( var . getGenericDeclaration ( ) ) ) // e . g . method scope could match but actual methods differ
|| contextSource != var . getGenericDeclaration ( ) ) { res = var ; break ; } } return res ; |
public class SerializerBase { /** * To fire off the PI trace event
* @ param name Name of PI */
protected void fireEscapingEvent ( String name , String data ) throws org . xml . sax . SAXException { } } | if ( m_tracer != null ) { flushMyWriter ( ) ; m_tracer . fireGenerateEvent ( SerializerTrace . EVENTTYPE_PI , name , data ) ; } |
public class SpriteImpl { /** * Stretch the surface with the specified new size .
* @ param newWidth The new width .
* @ param newHeight The new height . */
protected void stretch ( int newWidth , int newHeight ) { } } | width = newWidth ; height = newHeight ; surface = Graphics . resize ( surfaceOriginal , newWidth , newHeight ) ; |
public class MainActivity { /** * Start notify . */
public void onServerStart ( String ip ) { } } | closeDialog ( ) ; mBtnStart . setVisibility ( View . GONE ) ; mBtnStop . setVisibility ( View . VISIBLE ) ; mBtnBrowser . setVisibility ( View . VISIBLE ) ; if ( ! TextUtils . isEmpty ( ip ) ) { List < String > addressList = new LinkedList < > ( ) ; mRootUrl = "http://" + ip + ":8080/" ; addressList . add ( mRootUrl ) ; addressList . add ( "http://" + ip + ":8080/login.html" ) ; mTvMessage . setText ( TextUtils . join ( "\n" , addressList ) ) ; } else { mRootUrl = null ; mTvMessage . setText ( R . string . server_ip_error ) ; } |
public class JTATransactionManagerAdapter { public Object required ( TransactionCallback callback ) throws Throwable { } } | final boolean began = begin ( ) ; try { return callback . execute ( this ) ; } finally { if ( began ) { end ( ) ; } } |
public class AtomDODeserializer { /** * Parses the id to determine a datastreamId .
* @ param id
* @ return */
private String getDatastreamId ( DigitalObject obj , Entry entry ) { } } | String entryId = entry . getId ( ) . toString ( ) ; // matches info : fedora / pid / dsid / timestamp
Pattern pattern = Pattern . compile ( "^" + Constants . FEDORA . uri + ".+?/([^/]+)/?.*" ) ; Matcher matcher = pattern . matcher ( entryId ) ; if ( matcher . find ( ) ) { return matcher . group ( 1 ) ; } else { return obj . newDatastreamID ( ) ; } |
public class PeriodList { /** * { @ inheritDoc } */
public boolean addAll ( Collection < ? extends Period > arg0 ) { } } | for ( Period p : arg0 ) { add ( p ) ; } return true ; |
public class JavaMelodyAutoConfiguration { /** * Monitoring of beans methods having the { @ link Scheduled } or { @ link Schedules } annotations .
* @ return MonitoringSpringAdvisor */
@ Bean @ ConditionalOnProperty ( prefix = JavaMelodyConfigurationProperties . PREFIX , name = "scheduled-monitoring-enabled" , matchIfMissing = true ) @ ConditionalOnMissingBean ( DefaultAdvisorAutoProxyCreator . class ) public MonitoringSpringAdvisor monitoringSpringScheduledAdvisor ( ) { } } | // scheduled - monitoring - enabled was false by default because of # 643,
// pending https : / / jira . spring . io / browse / SPR - 15562,
// but true by default since 1.76 after adding dependency spring - boot - starter - aop
return new MonitoringSpringAdvisor ( Pointcuts . union ( new AnnotationMatchingPointcut ( null , Scheduled . class ) , new AnnotationMatchingPointcut ( null , Schedules . class ) ) ) ; |
public class Num { /** * 函数具体逻辑
* @ param scope 上下文
* @ return 计算好的节点 */
@ Override public XValue call ( Scope scope ) { } } | NodeTest textFun = Scanner . findNodeTestByName ( "allText" ) ; XValue textVal = textFun . call ( scope ) ; String whole = StringUtils . join ( textVal . asList ( ) , "" ) ; Matcher matcher = numExt . matcher ( whole ) ; if ( matcher . find ( ) ) { String numStr = matcher . group ( ) ; BigDecimal num = new BigDecimal ( numStr ) ; return XValue . create ( num . doubleValue ( ) ) ; } else { return XValue . create ( null ) ; } |
public class BitOutputStream { /** * If there are some unwritten bits , pad them if necessary and write them
* out .
* @ throws IOException
* IO exception */
public void align ( ) throws IOException { } } | if ( capacity < BITS_IN_BYTE ) { ostream . write ( buffer << capacity ) ; capacity = BITS_IN_BYTE ; buffer = 0 ; len ++ ; } |
public class JSAnonymousFunction { /** * Add the specified variable to the list of parameters for this function
* signature .
* @ param sName
* Name of the parameter being added
* @ return New parameter variable */
@ Nonnull public JSVar param ( @ Nonnull @ Nonempty final String sName ) { } } | final JSVar aVar = new JSVar ( sName , null ) ; m_aParams . add ( aVar ) ; return aVar ; |
public class SyncLaunch { /** * ( non - Javadoc )
* @ see
* org . parallelj . launching . transport . tcp . command . AbstractTcpCommand # process
* ( org . apache . mina . core . session . IoSession , java . lang . String [ ] ) */
@ Override public final String process ( final IoSession session , final String ... args ) { } } | RemoteProgram remoteProgram = null ; // Get the corresponding remoteProgram
try { remoteProgram = parseCommandLine ( args ) ; final Class < ? > jobClass = ( Class < ? > ) remoteProgram . getAdapterClass ( ) ; final Launcher launcher = Launcher . getLauncher ( ) ; final Launch < ? > launch = launcher . newLaunch ( jobClass ) ; for ( IOption ioption : this . getOptions ( ) ) { ioption . process ( launch , remoteProgram ) ; } final LaunchResult launchResult = launch . synchLaunch ( ) . getLaunchResult ( ) ; String status = null ; String userErrorCode = null ; if ( launchResult == null ) { status = ProgramReturnCodes . NOTSTARTED . name ( ) ; } else { status = String . valueOf ( launchResult . getStatusCode ( ) ) ; userErrorCode = String . valueOf ( launchResult . getReturnCode ( ) ) ; } return LaunchingMessageKind . ILAUNCH0003 . getFormatedMessage ( jobClass . getCanonicalName ( ) , launch . getLaunchId ( ) , status , userErrorCode ) ; } catch ( ParseException e ) { return e . getMessage ( ) ; } catch ( ParserException e ) { return e . getFormatedMessage ( ) ; } catch ( OptionException e ) { return e . getFormatedMessage ( ) ; } catch ( LaunchException e ) { return LaunchingMessageKind . ELAUNCH0008 . format ( remoteProgram != null ? remoteProgram . getAdapterClass ( ) : "unknown" , e ) ; } catch ( Exception e ) { return LaunchingMessageKind . ELAUNCH0008 . format ( remoteProgram != null ? remoteProgram . getAdapterClass ( ) : "unknown" , e . getMessage ( ) , e ) ; } |
public class ApiOvhDbaaslogs { /** * Returns actions of specified input
* REST : GET / dbaas / logs / { serviceName } / input / { inputId } / action
* @ param serviceName [ required ] Service name
* @ param inputId [ required ] Input ID */
public ArrayList < OvhInputAction > serviceName_input_inputId_action_GET ( String serviceName , String inputId ) throws IOException { } } | String qPath = "/dbaas/logs/{serviceName}/input/{inputId}/action" ; StringBuilder sb = path ( qPath , serviceName , inputId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t3 ) ; |
public class SequenceManagerHelper { /** * Returns the root { @ link org . apache . ojb . broker . metadata . ClassDescriptor } of the inheriatance
* hierachy of the given descriptor or the descriptor itself if no inheriatance on multiple table is
* used . */
private static ClassDescriptor findInheritanceRoot ( ClassDescriptor cld ) { } } | ClassDescriptor result = cld ; if ( cld . getSuperClassDescriptor ( ) != null ) { result = findInheritanceRoot ( cld . getSuperClassDescriptor ( ) ) ; } return result ; |
public class ListT { /** * Construct an ListT from an AnyM that wraps a monad containing Lists
* @ param monads AnyM that contains a monad wrapping an List
* @ return ListT */
public static < W extends WitnessType < W > , A > ListT < W , A > of ( final AnyM < W , ? extends IndexedSequenceX < A > > monads ) { } } | return new ListT < > ( monads ) ; |
public class ContentInfo { /** * Returns a byte array representation of the data held in
* the content field . */
public byte [ ] getContentBytes ( ) throws IOException { } } | if ( content == null ) return null ; DerInputStream dis = new DerInputStream ( content . toByteArray ( ) ) ; return dis . getOctetString ( ) ; |
public class NodeConfig { /** * 根据子节点名称获得子节点
* @ param name 子节点名称
* @ return 子节点 */
public NodeConfig getChildNodeByName ( String name ) { } } | if ( name == null ) { throw new NullPointerException ( "Node name is null" ) ; } if ( childrenNodes != null && childrenNodes . size ( ) > 0 ) { for ( NodeConfig childNode : childrenNodes ) { if ( childNode . getName ( ) . equals ( name ) ) { return childNode ; } } } return null ; |
public class OutlookMessageParser { /** * Parses a . msg file provided by an input stream .
* @ param msgFileStream The . msg file as a InputStream .
* @ return A { @ link OutlookMessage } object representing the . msg file .
* @ throws IOException Thrown if the file could not be loaded or parsed . */
public OutlookMessage parseMsg ( @ Nonnull final InputStream msgFileStream ) throws IOException { } } | // the . msg file , like a file system , contains directories and documents within this directories
// we now gain access to the root node and recursively go through the complete ' filesystem ' .
final OutlookMessage msg = new OutlookMessage ( rtf2htmlConverter ) ; try { checkDirectoryEntry ( new POIFSFileSystem ( msgFileStream ) . getRoot ( ) , msg ) ; } finally { msgFileStream . close ( ) ; } convertHeaders ( msg ) ; return msg ; |
public class EventMention { /** * getter for anchor - gets
* @ generated
* @ return value of the feature */
public Anchor getAnchor ( ) { } } | if ( EventMention_Type . featOkTst && ( ( EventMention_Type ) jcasType ) . casFeat_anchor == null ) jcasType . jcas . throwFeatMissing ( "anchor" , "de.julielab.jules.types.ace.EventMention" ) ; return ( Anchor ) ( jcasType . ll_cas . ll_getFSForRef ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( EventMention_Type ) jcasType ) . casFeatCode_anchor ) ) ) ; |
public class DuoCookie { /** * Parses a base64 - encoded Duo cookie , producing a new DuoCookie object
* containing the data therein . If the given string is not a valid Duo
* cookie , an exception is thrown . Note that the cookie may be expired , and
* must be checked for expiration prior to actual use .
* @ param str
* The base64 - encoded Duo cookie to parse .
* @ return
* A new DuoCookie object containing the same data as the given
* base64 - encoded Duo cookie string .
* @ throws GuacamoleException
* If the given string is not a valid base64 - encoded Duo cookie . */
public static DuoCookie parseDuoCookie ( String str ) throws GuacamoleException { } } | // Attempt to decode data as base64
String data ; try { data = new String ( BaseEncoding . base64 ( ) . decode ( str ) , "UTF-8" ) ; } // Bail if invalid base64 is provided
catch ( IllegalArgumentException e ) { throw new GuacamoleClientException ( "Username is not correctly " + "encoded as base64." , e ) ; } // Throw hard errors if standard pieces of Java are missing
catch ( UnsupportedEncodingException e ) { throw new UnsupportedOperationException ( "Unexpected lack of " + "UTF-8 support." , e ) ; } // Verify format of provided data
Matcher matcher = COOKIE_FORMAT . matcher ( data ) ; if ( ! matcher . matches ( ) ) throw new GuacamoleClientException ( "Format of base64-encoded " + "username is invalid." ) ; // Get username and key ( simple strings )
String username = matcher . group ( USERNAME_GROUP ) ; String key = matcher . group ( INTEGRATION_KEY_GROUP ) ; // Parse expiration time
long expires ; try { expires = Long . parseLong ( matcher . group ( EXPIRATION_TIMESTAMP_GROUP ) ) ; } // Bail if expiration timestamp is not a valid long
catch ( NumberFormatException e ) { throw new GuacamoleClientException ( "Expiration timestamp is " + "not valid." , e ) ; } // Return parsed cookie
return new DuoCookie ( username , key , expires ) ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getMCARG ( ) { } } | if ( mcargEClass == null ) { mcargEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 413 ) ; } return mcargEClass ; |
public class SimpleHadoopFilesystemConfigStore { /** * Retrieves the { @ link Config } for the given { @ link ConfigKeyPath } by reading the { @ link # MAIN _ CONF _ FILE _ NAME }
* associated with the dataset specified by the given { @ link ConfigKeyPath } . If the { @ link Path } described by the
* { @ link ConfigKeyPath } does not exist then an empty { @ link Config } is returned .
* @ param configKey the config key path whose properties are needed .
* @ param version the configuration version in the configuration store .
* @ return a { @ link Config } for the given configKey .
* @ throws VersionDoesNotExistException if the version specified cannot be found in the { @ link ConfigStore } . */
@ Override public Config getOwnConfig ( ConfigKeyPath configKey , String version ) throws VersionDoesNotExistException { } } | Preconditions . checkNotNull ( configKey , "configKey cannot be null!" ) ; Preconditions . checkArgument ( ! Strings . isNullOrEmpty ( version ) , "version cannot be null or empty!" ) ; Path datasetDir = getDatasetDirForKey ( configKey , version ) ; Path mainConfFile = new Path ( datasetDir , MAIN_CONF_FILE_NAME ) ; try { if ( ! this . fs . exists ( mainConfFile ) ) { return ConfigFactory . empty ( ) ; } FileStatus configFileStatus = this . fs . getFileStatus ( mainConfFile ) ; if ( ! configFileStatus . isDirectory ( ) ) { try ( InputStream mainConfInputStream = this . fs . open ( configFileStatus . getPath ( ) ) ) { return ConfigFactory . parseReader ( new InputStreamReader ( mainConfInputStream , Charsets . UTF_8 ) ) ; } } return ConfigFactory . empty ( ) ; } catch ( IOException e ) { throw new RuntimeException ( String . format ( "Error while getting config for configKey: \"%s\"" , configKey ) , e ) ; } |
public class DefaultIdStrategy { /** * Registers a delegate by specifying the class name . Returns true if registration is successful . */
public < T > boolean registerDelegate ( String className , Delegate < T > delegate ) { } } | return null == delegateMapping . putIfAbsent ( className , new HasDelegate < T > ( delegate , this ) ) ; |
public class ImportDialog { /** * GEN - LAST : event _ btOkActionPerformed */
private void btSearchInputDirActionPerformed ( java . awt . event . ActionEvent evt ) // GEN - FIRST : event _ btSearchInputDirActionPerformed
{ } } | // GEN - HEADEREND : event _ btSearchInputDirActionPerformed
if ( ! "" . equals ( txtInputDir . getText ( ) ) ) { File f = new File ( txtInputDir . getText ( ) ) ; if ( f . exists ( ) && ( f . isDirectory ( ) || "zip" . equals ( Files . getFileExtension ( f . getName ( ) ) ) ) ) { fileChooser . setSelectedFile ( f ) ; } } if ( fileChooser . showDialog ( this , "Select" ) == JFileChooser . APPROVE_OPTION ) { File f = fileChooser . getSelectedFile ( ) ; txtInputDir . setText ( f . getAbsolutePath ( ) ) ; storeProperties ( ) ; } |
public class PersonGroupsImpl { /** * Queue a person group training task , the training task may not be started immediately .
* @ param personGroupId Id referencing a particular person group .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponse } object if successful . */
public Observable < Void > trainAsync ( String personGroupId ) { } } | return trainWithServiceResponseAsync ( personGroupId ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class BigtableTableAdminGrpcClient { /** * { @ inheritDoc } */
@ Override public void createTable ( CreateTableRequest request ) { } } | createUnaryListener ( request , createTableRpc , request . getParent ( ) ) . getBlockingResult ( ) ; |
public class AbstractDataAccess { /** * Writes some internal data into the beginning of the specified file . */
protected void writeHeader ( RandomAccessFile file , long length , int segmentSize ) throws IOException { } } | file . seek ( 0 ) ; file . writeUTF ( "GH" ) ; file . writeLong ( length ) ; file . writeInt ( segmentSize ) ; for ( int i = 0 ; i < header . length ; i ++ ) { file . writeInt ( header [ i ] ) ; } |
public class ReflectionUtils { /** * Load Class by class name . If class not found in it ' s Class loader or one of the parent class loaders - delegate to the Thread ' s ContextClassLoader
* @ param className Canonical class name
* @ return Class definition of className
* @ throws ClassNotFoundException */
public static Class < ? > loadClassByName ( String className ) throws ClassNotFoundException { } } | try { return Class . forName ( className ) ; } catch ( ClassNotFoundException e ) { return Thread . currentThread ( ) . getContextClassLoader ( ) . loadClass ( className ) ; } |
public class TriangularSolver_DDRB { /** * Performs an in - place solve operation where T is contained in a single block . < br >
* < br >
* B = T < sup > - 1 < / sup > B < br >
* < br >
* where T is a triangular matrix contained in an inner block . T or B can be transposed . T must be a single complete inner block
* and B is either a column block vector or row block vector .
* @ param blockLength Size of the inner blocks in the block matrix .
* @ param upper If T is upper or lower triangular .
* @ param T An upper or lower triangular matrix that is contained in an inner block . Not modified .
* @ param B A block aligned row or column submatrix . Modified .
* @ param transT If T is transposed or not .
* @ param transB If B is transposed or not . */
public static void solveBlock ( final int blockLength , final boolean upper , final DSubmatrixD1 T , final DSubmatrixD1 B , final boolean transT , final boolean transB ) { } } | int Trows = T . row1 - T . row0 ; if ( Trows > blockLength ) throw new IllegalArgumentException ( "T can be at most the size of a block" ) ; // number of rows in a block . The submatrix can be smaller than a block
final int blockT_rows = Math . min ( blockLength , T . original . numRows - T . row0 ) ; final int blockT_cols = Math . min ( blockLength , T . original . numCols - T . col0 ) ; int offsetT = T . row0 * T . original . numCols + blockT_rows * T . col0 ; final double dataT [ ] = T . original . data ; final double dataB [ ] = B . original . data ; if ( transB ) { if ( upper ) { if ( transT ) { throw new IllegalArgumentException ( "Operation not yet supported" ) ; } else { throw new IllegalArgumentException ( "Operation not yet supported" ) ; } } else { if ( transT ) { throw new IllegalArgumentException ( "Operation not yet supported" ) ; } else { for ( int i = B . row0 ; i < B . row1 ; i += blockLength ) { int N = Math . min ( B . row1 , i + blockLength ) - i ; int offsetB = i * B . original . numCols + N * B . col0 ; InnerTriangularSolver_DDRB . solveLTransB ( dataT , dataB , blockT_rows , N , blockT_rows , offsetT , offsetB ) ; } } } } else { if ( Trows != B . row1 - B . row0 ) throw new IllegalArgumentException ( "T and B must have the same number of rows." ) ; if ( upper ) { if ( transT ) { for ( int i = B . col0 ; i < B . col1 ; i += blockLength ) { int offsetB = B . row0 * B . original . numCols + Trows * i ; int N = Math . min ( B . col1 , i + blockLength ) - i ; InnerTriangularSolver_DDRB . solveTransU ( dataT , dataB , Trows , N , Trows , offsetT , offsetB ) ; } } else { for ( int i = B . col0 ; i < B . col1 ; i += blockLength ) { int offsetB = B . row0 * B . original . numCols + Trows * i ; int N = Math . min ( B . col1 , i + blockLength ) - i ; InnerTriangularSolver_DDRB . solveU ( dataT , dataB , Trows , N , Trows , offsetT , offsetB ) ; } } } else { if ( transT ) { for ( int i = B . col0 ; i < B . col1 ; i += blockLength ) { int offsetB = B . row0 * B . original . numCols + Trows * i ; int N = Math . min ( B . col1 , i + blockLength ) - i ; InnerTriangularSolver_DDRB . 
solveTransL ( dataT , dataB , Trows , N , blockT_cols , offsetT , offsetB ) ; } } else { for ( int i = B . col0 ; i < B . col1 ; i += blockLength ) { int offsetB = B . row0 * B . original . numCols + Trows * i ; int N = Math . min ( B . col1 , i + blockLength ) - i ; InnerTriangularSolver_DDRB . solveL ( dataT , dataB , Trows , N , blockT_cols , offsetT , offsetB ) ; } } } } |
public class TablePlanner { /** * Constructs a product plan of the specified trunk and this table .
* The select predicate applicable to this table is pushed down below the
* product .
* @ param trunk
* the specified trunk of join
* @ return a product plan of the trunk and this table */
public Plan makeProductPlan ( Plan trunk ) { } } | Plan p = makeSelectPlan ( ) ; return new MultiBufferProductPlan ( trunk , p , tx ) ; |
public class KeyMatcher { /** * Access the bucket for this specific character .
* @ param c
* @ return HeaderBucket */
protected KeyBucket makeBucket ( char c ) { } } | if ( c > this . buckets . length ) { // can ' t handle non - ASCII chars
return null ; } int index = c ; // if we ' re case - insensitive , push uppercase into lowercase buckets
if ( ! isCaseSensitive ( ) && ( c >= 'A' && c <= 'Z' ) ) { index += 32 ; } if ( null == this . buckets [ index ] ) { this . buckets [ index ] = new KeyBucket ( ) ; } return this . buckets [ index ] ; |
public class ConditionalCheck { /** * Ensures that a passed map as a parameter of the calling method is not empty .
* We recommend to use the overloaded method { @ link Check # notEmpty ( Object [ ] , String ) } and pass as second argument
* the name of the parameter to enhance the exception message .
* @ param condition
* condition must be { @ code true } ^ so that the check will be performed
* @ param array
* a map which should not be empty
* @ throws IllegalNullArgumentException
* if the given argument { @ code array } is { @ code null }
* @ throws IllegalEmptyArgumentException
* if the given argument { @ code array } is empty */
@ ArgumentsChecked @ Throws ( { } } | IllegalNullArgumentException . class , IllegalEmptyArgumentException . class } ) public static < T > void notEmpty ( final boolean condition , @ Nonnull final T [ ] array ) { if ( condition ) { Check . notEmpty ( array ) ; } |
public class PresentationControlImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eUnset ( int featureID ) { } } | switch ( featureID ) { case AfplibPackage . PRESENTATION_CONTROL__PRS_FLG : setPRSFlg ( PRS_FLG_EDEFAULT ) ; return ; } super . eUnset ( featureID ) ; |
public class ThreadGroup { /** * Resumes all threads in this thread group .
* First , the < code > checkAccess < / code > method of this thread group is
* called with no arguments ; this may result in a security exception .
* This method then calls the < code > resume < / code > method on all the
* threads in this thread group and in all of its sub groups .
* @ exception SecurityException if the current thread is not allowed to
* access this thread group or any of the threads in the
* thread group .
* @ see java . lang . SecurityException
* @ see java . lang . Thread # resume ( )
* @ see java . lang . ThreadGroup # checkAccess ( )
* @ since JDK1.0
* @ deprecated This method is used solely in conjunction with
* < tt > Thread . suspend < / tt > and < tt > ThreadGroup . suspend < / tt > ,
* both of which have been deprecated , as they are inherently
* deadlock - prone . See { @ link Thread # suspend } for details . */
@ Deprecated @ SuppressWarnings ( "deprecation" ) public final void resume ( ) { } } | int ngroupsSnapshot ; ThreadGroup [ ] groupsSnapshot ; synchronized ( this ) { checkAccess ( ) ; for ( int i = 0 ; i < nthreads ; i ++ ) { threads [ i ] . resume ( ) ; } ngroupsSnapshot = ngroups ; if ( groups != null ) { groupsSnapshot = Arrays . copyOf ( groups , ngroupsSnapshot ) ; } else { groupsSnapshot = null ; } } for ( int i = 0 ; i < ngroupsSnapshot ; i ++ ) { groupsSnapshot [ i ] . resume ( ) ; } |
public class HelloWorldHttp2Handler { /** * If receive a frame with end - of - stream set , send a pre - canned response . */
private static void onHeadersRead ( ChannelHandlerContext ctx , Http2HeadersFrame headers ) throws Exception { } } | if ( headers . isEndStream ( ) ) { ByteBuf content = ctx . alloc ( ) . buffer ( ) ; content . writeBytes ( RESPONSE_BYTES . duplicate ( ) ) ; ByteBufUtil . writeAscii ( content , " - via HTTP/2" ) ; sendResponse ( ctx , content ) ; } |
public class Sequencer { /** * Looks to see if a sequencer has been generated for the sequence
* with the specified name . If not , it will instantiate one .
* Multiple calls to this method with the same name are guaranteed
* to receive the same sequencer object . For best performance ,
* classes should save a reference to the sequencer once they get it
* in order to avoid the overhead of a < code > HashMap < / code > lookup .
* @ param name the name of the desired sequencer
* @ return the sequencer with the specified name */
static public final Sequencer getInstance ( String name ) { } } | logger . debug ( "enter - getInstance()" ) ; try { Sequencer seq = null ; if ( sequencers . containsKey ( name ) ) { seq = sequencers . get ( name ) ; } if ( seq != null ) { return seq ; } synchronized ( sequencers ) { // redundant due to the non - synchronized calls above done for performance
if ( ! sequencers . containsKey ( name ) ) { try { seq = defaultSequencer . newInstance ( ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; return null ; } seq . setName ( name ) ; sequencers . put ( name , seq ) ; return seq ; } else { return sequencers . get ( name ) ; } } } finally { logger . debug ( "exit - getInstance()" ) ; } |
public class CPDefinitionLinkUtil { /** * Returns the cp definition link where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache .
* @ param uuid the uuid
* @ param groupId the group ID
* @ param retrieveFromCache whether to retrieve from the finder cache
* @ return the matching cp definition link , or < code > null < / code > if a matching cp definition link could not be found */
public static CPDefinitionLink fetchByUUID_G ( String uuid , long groupId , boolean retrieveFromCache ) { } } | return getPersistence ( ) . fetchByUUID_G ( uuid , groupId , retrieveFromCache ) ; |
public class BeanMap { /** * Returns the values for the BeanMap .
* @ return values for the BeanMap . The returned collection is not
* modifiable . */
@ Override public Collection < Object > values ( ) { } } | List < Object > answer = new ArrayList < Object > ( readMethods . size ( ) ) ; for ( Iterator < Object > iter = valueIterator ( ) ; iter . hasNext ( ) ; ) { answer . add ( iter . next ( ) ) ; } return answer ; |
public class SCFLImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public boolean eIsSet ( int featureID ) { } } | switch ( featureID ) { case AfplibPackage . SCFL__LID : return LID_EDEFAULT == null ? lid != null : ! LID_EDEFAULT . equals ( lid ) ; } return super . eIsSet ( featureID ) ; |
public class InternalXbaseLexer { /** * $ ANTLR start " RULE _ INT " */
public final void mRULE_INT ( ) throws RecognitionException { } } | try { int _type = RULE_INT ; int _channel = DEFAULT_TOKEN_CHANNEL ; // InternalXbase . g : 16906:10 : ( ' 0 ' . . ' 9 ' ( ' 0 ' . . ' 9 ' | ' _ ' ) * )
// InternalXbase . g : 16906:12 : ' 0 ' . . ' 9 ' ( ' 0 ' . . ' 9 ' | ' _ ' ) *
{ matchRange ( '0' , '9' ) ; // InternalXbase . g : 16906:21 : ( ' 0 ' . . ' 9 ' | ' _ ' ) *
loop5 : do { int alt5 = 2 ; int LA5_0 = input . LA ( 1 ) ; if ( ( ( LA5_0 >= '0' && LA5_0 <= '9' ) || LA5_0 == '_' ) ) { alt5 = 1 ; } switch ( alt5 ) { case 1 : // InternalXbase . g :
{ if ( ( input . LA ( 1 ) >= '0' && input . LA ( 1 ) <= '9' ) || input . LA ( 1 ) == '_' ) { input . consume ( ) ; } else { MismatchedSetException mse = new MismatchedSetException ( null , input ) ; recover ( mse ) ; throw mse ; } } break ; default : break loop5 ; } } while ( true ) ; } state . type = _type ; state . channel = _channel ; } finally { } |
public class InstanceCreator { /** * Create an instance of clazz
* @ param clazz the clazz to create an instance of
* @ param fallback a value provider which provides a fallback in case of a failure
* @ param < T > the return type
* @ return a new instance of clazz or fallback */
@ NonNull public < T > T create ( @ NonNull Class < ? extends T > clazz , @ NonNull Fallback < T > fallback ) { } } | T t = create ( clazz ) ; return t != null ? t : fallback . get ( ) ; |
public class AmazonCloudDirectoryClient { /** * Lists schema major versions applied to a directory . If < code > SchemaArn < / code > is provided , lists the minor
* version .
* @ param listAppliedSchemaArnsRequest
* @ return Result of the ListAppliedSchemaArns operation returned by the service .
* @ throws InternalServiceException
* Indicates a problem that must be resolved by Amazon Web Services . This might be a transient error in
* which case you can retry your request until it succeeds . Otherwise , go to the < a
* href = " http : / / status . aws . amazon . com / " > AWS Service Health Dashboard < / a > site to see if there are any
* operational issues with the service .
* @ throws InvalidArnException
* Indicates that the provided ARN value is not valid .
* @ throws RetryableConflictException
* Occurs when a conflict with a previous successful write is detected . For example , if a write operation
* occurs on an object and then an attempt is made to read the object using “ SERIALIZABLE ” consistency , this
* exception may result . This generally occurs when the previous write did not have time to propagate to the
* host serving the current request . A retry ( with appropriate backoff logic ) is the recommended response to
* this exception .
* @ throws ValidationException
* Indicates that your request is malformed in some manner . See the exception message .
* @ throws LimitExceededException
* Indicates that limits are exceeded . See < a
* href = " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / limits . html " > Limits < / a > for more
* information .
* @ throws AccessDeniedException
* Access denied . Check your permissions .
* @ throws ResourceNotFoundException
* The specified resource could not be found .
* @ throws InvalidNextTokenException
* Indicates that the < code > NextToken < / code > value is not valid .
* @ sample AmazonCloudDirectory . ListAppliedSchemaArns
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / clouddirectory - 2017-01-11 / ListAppliedSchemaArns "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ListAppliedSchemaArnsResult listAppliedSchemaArns ( ListAppliedSchemaArnsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeListAppliedSchemaArns ( request ) ; |
public class BuildTasksInner { /** * Updates a build task with the specified parameters .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param buildTaskName The name of the container registry build task .
* @ param buildTaskUpdateParameters The parameters for updating a build task .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < ServiceResponse < BuildTaskInner > > updateWithServiceResponseAsync ( String resourceGroupName , String registryName , String buildTaskName , BuildTaskUpdateParameters buildTaskUpdateParameters ) { } } | if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( registryName == null ) { throw new IllegalArgumentException ( "Parameter registryName is required and cannot be null." ) ; } if ( buildTaskName == null ) { throw new IllegalArgumentException ( "Parameter buildTaskName is required and cannot be null." ) ; } if ( buildTaskUpdateParameters == null ) { throw new IllegalArgumentException ( "Parameter buildTaskUpdateParameters is required and cannot be null." ) ; } Validator . validate ( buildTaskUpdateParameters ) ; final String apiVersion = "2018-02-01-preview" ; Observable < Response < ResponseBody > > observable = service . update ( this . client . subscriptionId ( ) , resourceGroupName , registryName , buildTaskName , apiVersion , buildTaskUpdateParameters , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) ; return client . getAzureClient ( ) . getPutOrPatchResultAsync ( observable , new TypeToken < BuildTaskInner > ( ) { } . getType ( ) ) ; |
public class SftpUtil { /** * Ensures that the directory exists and is writable by deleting the
* directory and then recreate it .
* @ param muleContext
* @ param endpointName
* @ throws org . mule . api . MuleException
* @ throws java . io . IOException
* @ throws com . jcraft . jsch . SftpException */
public static void initEndpointDirectory ( MuleContext muleContext , String endpointName ) throws MuleException , IOException , SftpException { } } | SftpClient sftpClient = getSftpClient ( muleContext , endpointName ) ; try { ChannelSftp channelSftp = sftpClient . getChannelSftp ( ) ; try { recursiveDelete ( muleContext , sftpClient , endpointName , "" ) ; } catch ( IOException e ) { if ( logger . isErrorEnabled ( ) ) logger . error ( "Failed to recursivly delete endpoint " + endpointName , e ) ; } String path = getPathByEndpoint ( muleContext , sftpClient , endpointName ) ; mkDirs ( channelSftp , path ) ; } finally { sftpClient . disconnect ( ) ; if ( logger . isDebugEnabled ( ) ) logger . debug ( "Done init endpoint directory: " + endpointName ) ; } |
public class LinearClassifierFactory { /** * Sets the sigma parameter to a value that optimizes the held - out score given by < code > scorer < / code > . Search for an optimal value
* is carried out by < code > minimizer < / code >
* dataset the data set to optimize sigma on .
* kfold
* @ return an interim set of optimal weights : the weights */
public double [ ] heldOutSetSigma ( final GeneralDataset < L , F > trainSet , final GeneralDataset < L , F > devSet , final Scorer < L > scorer , LineSearcher minimizer ) { } } | featureIndex = trainSet . featureIndex ; labelIndex = trainSet . labelIndex ; // double [ ] resultWeights = null ;
Timing timer = new Timing ( ) ; NegativeScorer negativeScorer = new NegativeScorer ( trainSet , devSet , scorer , timer ) ; timer . start ( ) ; double bestSigma = minimizer . minimize ( negativeScorer ) ; System . err . println ( "##best sigma: " + bestSigma ) ; setSigma ( bestSigma ) ; return ArrayUtils . flatten ( trainWeights ( trainSet , negativeScorer . weights , true ) ) ; // make sure it ' s actually the interim weights from best sigma |
public class KriptonContentValues { /** * Key list .
* @ return the string */
public String keyList ( ) { } } | String separator = "" ; StringBuilder buffer = new StringBuilder ( ) ; String item ; for ( int i = 0 ; i < names . size ( ) ; i ++ ) { item = names . get ( i ) ; buffer . append ( separator + item ) ; separator = ", " ; } return buffer . toString ( ) ; |
public class ComputerLauncher { /** * Launches the agent for the given { @ link Computer } .
* If the agent is launched successfully , { @ link SlaveComputer # setChannel ( InputStream , OutputStream , TaskListener , Channel . Listener ) }
* should be invoked in the end to notify Hudson of the established connection .
* The operation could also fail , in which case there ' s no need to make any callback notification ,
* ( except to notify the user of the failure through { @ link StreamTaskListener } . )
* Also note that the normal return of this method call does not necessarily signify a successful launch .
* If someone programmatically calls this method and wants to find out if the launch was a success ,
* use { @ link SlaveComputer # isOnline ( ) } at the end .
* This method must operate synchronously . Asynchrony is provided by { @ link Computer # connect ( boolean ) } and
* its correct operation depends on this .
* @ param listener
* The progress of the launch , as well as any error , should be sent to this listener .
* @ throws IOException
* if the method throws an { @ link IOException } or { @ link InterruptedException } , the launch was considered
* a failure and the stack trace is reported into the listener . This handling is just so that the implementation
* of this method doesn ' t have to diligently catch those exceptions . */
public void launch ( SlaveComputer computer , TaskListener listener ) throws IOException , InterruptedException { } } | // to remain compatible with the legacy implementation that overrides the old signature
launch ( computer , cast ( listener ) ) ; |
public class NodeVector { /** * Pop a node from the tail of the vector and return the
* top of the stack after the pop .
* @ return The top of the stack after it ' s been popped */
public final int popAndTop ( ) { } } | m_firstFree -- ; m_map [ m_firstFree ] = DTM . NULL ; return ( m_firstFree == 0 ) ? DTM . NULL : m_map [ m_firstFree - 1 ] ; |
public class MethodMetadata { /** * 根据method构建method元数据
* @ param method method
* @ return method元数据 */
public static MethodMetadata build ( Method method ) { } } | return new MethodMetadata ( method . getName ( ) , method . getDeclaringClass ( ) , method , method . getParameterTypes ( ) ) ; |
public class HttpBasicAuthLogicHandler { /** * { @ inheritDoc } */
@ Override public void handleResponse ( final HttpProxyResponse response ) throws ProxyAuthException { } } | if ( response . getStatusCode ( ) != 407 ) { throw new ProxyAuthException ( "Received error response code (" + response . getStatusLine ( ) + ")." ) ; } |
public class MessageType { /** * Returns the corresponding MessageType for a given Byte .
* This method should NOT be called by any code outside the MFP component .
* It is only public so that it can be accessed by sub - packages .
* @ param aValue The Byte for which an MessageType is required .
* @ return The corresponding MessageType */
public final static MessageType getMessageType ( Byte aValue ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Value = " + aValue ) ; return set [ aValue . intValue ( ) ] ; |
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public OBPRefCSys createOBPRefCSysFromString ( EDataType eDataType , String initialValue ) { } } | OBPRefCSys result = OBPRefCSys . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ; |
public class PeriodDuration { /** * Returns a copy of this instance with the days and duration normalized using the standard day of 24 hours .
* This normalizes the days and duration , leaving the years and months unchanged .
* The result uses a standard day length of 24 hours .
* This combines the duration seconds with the number of days and shares the total
* seconds between the two fields . For example , a period of
* " 2 days and 86401 seconds " will be normalized to " 3 days and 1 second " .
* The sign of the days and duration will be the same after normalization .
* For example , a period of " 1 day and - 172801 seconds " will be normalized to
* " - 1 day and - 1 second " .
* Note that no normalization is performed on the years or months .
* This instance is immutable and unaffected by this method call .
* @ return a { @ code PeriodDuration } based on this one with excess duration normalized to days , not null
* @ throws ArithmeticException if numeric overflow occurs */
public PeriodDuration normalizedStandardDays ( ) { } } | long totalSecs = period . getDays ( ) * SECONDS_PER_DAY + duration . getSeconds ( ) ; int splitDays = Math . toIntExact ( totalSecs / SECONDS_PER_DAY ) ; long splitSecs = totalSecs % SECONDS_PER_DAY ; if ( splitDays == period . getDays ( ) && splitSecs == duration . getSeconds ( ) ) { return this ; } return PeriodDuration . of ( period . withDays ( splitDays ) , duration . withSeconds ( splitSecs ) ) ; |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcFuelProperties ( ) { } } | if ( ifcFuelPropertiesEClass == null ) { ifcFuelPropertiesEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 258 ) ; } return ifcFuelPropertiesEClass ; |
public class CommercePaymentMethodGroupRelPersistenceImpl { /** * Returns all the commerce payment method group rels .
* @ return the commerce payment method group rels */
@ Override public List < CommercePaymentMethodGroupRel > findAll ( ) { } } | return findAll ( QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ; |
public class GrantListEntry { /** * The list of operations permitted by the grant .
* @ param operations
* The list of operations permitted by the grant .
* @ see GrantOperation */
public void setOperations ( java . util . Collection < String > operations ) { } } | if ( operations == null ) { this . operations = null ; return ; } this . operations = new com . ibm . cloud . objectstorage . internal . SdkInternalList < String > ( operations ) ; |
public class LongStreamEx { /** * Produces an array containing cumulative results of applying the
* accumulation function going left to right .
* This is a terminal operation .
* For parallel stream it ' s not guaranteed that accumulator will always be
* executed in the same thread .
* This method cannot take all the advantages of parallel streams as it must
* process elements strictly left to right .
* @ param accumulator a
* < a href = " package - summary . html # NonInterference " > non - interfering
* < / a > , < a href = " package - summary . html # Statelessness " > stateless < / a >
* function for incorporating an additional element into a result
* @ return the array where the first element is the first element of this
* stream and every successor element is the result of applying
* accumulator function to the previous array element and the
* corresponding stream element . The resulting array has the same
* length as this stream .
* @ see # foldLeft ( LongBinaryOperator )
* @ since 0.5.1 */
public long [ ] scanLeft ( LongBinaryOperator accumulator ) { } } | Spliterator . OfLong spliterator = spliterator ( ) ; long size = spliterator . getExactSizeIfKnown ( ) ; LongBuffer buf = new LongBuffer ( size >= 0 && size <= Integer . MAX_VALUE ? ( int ) size : INITIAL_SIZE ) ; delegate ( spliterator ) . forEachOrdered ( i -> buf . add ( buf . size == 0 ? i : accumulator . applyAsLong ( buf . data [ buf . size - 1 ] , i ) ) ) ; return buf . toArray ( ) ; |
public class DisassociateDiscoveredResourceRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DisassociateDiscoveredResourceRequest disassociateDiscoveredResourceRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( disassociateDiscoveredResourceRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( disassociateDiscoveredResourceRequest . getProgressUpdateStream ( ) , PROGRESSUPDATESTREAM_BINDING ) ; protocolMarshaller . marshall ( disassociateDiscoveredResourceRequest . getMigrationTaskName ( ) , MIGRATIONTASKNAME_BINDING ) ; protocolMarshaller . marshall ( disassociateDiscoveredResourceRequest . getConfigurationId ( ) , CONFIGURATIONID_BINDING ) ; protocolMarshaller . marshall ( disassociateDiscoveredResourceRequest . getDryRun ( ) , DRYRUN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class CountingFlushableAppendable { /** * Soy is about to block on a future . Flush the output stream if there is anything to flush ,
* so we use the time we are blocking to transfer as many bytes as possible . */
@ Override public void beforeBlock ( ) { } } | if ( count > 0 ) { try { flush ( ) ; } catch ( IOException e ) { logger . log ( Level . SEVERE , "Flush from soy failed" , e ) ; } } |
public class Message { /** * Note that you cannot use this method to read a buffer wrapping the array returned from toByteArray as the internal representation is different !
* @ param buffer */
public void read ( ByteBuffer buffer ) { } } | Persistables . persistable ( streamableNoBuffers ( ) ) . read ( buffer ) ; final int n = getNumDataBuffers ( ) ; int lengthsPosition = buffer . position ( ) ; buffer . position ( buffer . position ( ) + 2 * n ) ; // skip positions
for ( int i = 0 ; i < n ; i ++ ) { final int size = buffer . getShort ( lengthsPosition ) & 0xFFFF ; // big - endian
lengthsPosition += 2 ; final ByteBuffer b1 = Persistables . slice ( buffer , size ) ; setDataBuffer ( i , b1 ) ; } |
public class OrderAction { /** * Moves the currently selected entity { @ code amount } positions < b > UP < / b >
* in order by pushing all entities down by one position .
* @ param amount
* The amount of positions that should be moved
* @ throws java . lang . IllegalStateException
* If no entity has been selected yet
* @ throws java . lang . IllegalArgumentException
* If the specified amount would cause the entity to go out - of - bounds
* @ return The current OrderAction sub - implementation instance
* @ see # moveTo ( int ) */
public M moveUp ( int amount ) { } } | Checks . notNegative ( amount , "Provided amount" ) ; if ( selectedPosition == - 1 ) throw new IllegalStateException ( "Cannot move until an item has been selected. Use #selectPosition first." ) ; if ( ascendingOrder ) { Checks . check ( selectedPosition - amount >= 0 , "Amount provided to move up is too large and would be out of bounds." + "Selected position: " + selectedPosition + " Amount: " + amount + " Largest Position: " + orderList . size ( ) ) ; } else { Checks . check ( selectedPosition + amount < orderList . size ( ) , "Amount provided to move up is too large and would be out of bounds." + "Selected position: " + selectedPosition + " Amount: " + amount + " Largest Position: " + orderList . size ( ) ) ; } if ( ascendingOrder ) return moveTo ( selectedPosition - amount ) ; else return moveTo ( selectedPosition + amount ) ; |
public class CmsFlexCache { /** * Returns all variations in the cache for a given resource name .
* The variations are of type String . < p >
* Useful if you want to show a list of all cached entry - variations ,
* like on the FlexCache administration page . < p >
* Only users with administrator permissions are allowed
* to perform this operation . < p >
* @ param key the resource name for which to look up the variations for
* @ param cms the CmsObject used for user authorization
* @ return a Set of cached variations ( which are of type String ) */
public Set < String > getCachedVariations ( String key , CmsObject cms ) { } } | if ( ! isEnabled ( ) || ! OpenCms . getRoleManager ( ) . hasRole ( cms , CmsRole . WORKPLACE_MANAGER ) ) { return null ; } Object o = m_keyCache . get ( key ) ; if ( o != null ) { return synchronizedCopyKeys ( ( ( CmsFlexCacheVariation ) o ) . m_map ) ; } return null ; |
public class ValidateCreditCard { /** * Convert a creditCardNumber as long to a formatted String . Currently it breaks 16 - digit numbers
* into groups of 4.
* @ param creditCardNumber number on card .
* @ return String representation of the credit card number . */
private static String toPrettyString ( long creditCardNumber ) { } } | String plain = Long . toString ( creditCardNumber ) ; // int i = findMatchingRange ( creditCardNumber ) ;
int length = plain . length ( ) ; switch ( length ) { case 12 : // 12 pattern 3-3-3-3
return plain . substring ( 0 , 3 ) + ' ' + plain . substring ( 3 , 6 ) + ' ' + plain . substring ( 6 , 9 ) + ' ' + plain . substring ( 9 , 12 ) ; case 13 : // 13 pattern 4-3-3-3
return plain . substring ( 0 , 4 ) + ' ' + plain . substring ( 4 , 7 ) + ' ' + plain . substring ( 7 , 10 ) + ' ' + plain . substring ( 10 , 13 ) ; case 14 : // 14 pattern 2-4-4-4
return plain . substring ( 0 , 2 ) + ' ' + plain . substring ( 2 , 6 ) + ' ' + plain . substring ( 6 , 10 ) + ' ' + plain . substring ( 10 , 14 ) ; case 15 : // 15 pattern 3-4-4-4
return plain . substring ( 0 , 3 ) + ' ' + plain . substring ( 3 , 7 ) + ' ' + plain . substring ( 7 , 11 ) + ' ' + plain . substring ( 11 , 15 ) ; case 16 : // 16 pattern 4-4-4-4
return plain . substring ( 0 , 4 ) + ' ' + plain . substring ( 4 , 8 ) + ' ' + plain . substring ( 8 , 12 ) + ' ' + plain . substring ( 12 , 16 ) ; case 17 : // 17 pattern 1-4-4-4-4
return plain . substring ( 0 , 1 ) + ' ' + plain . substring ( 1 , 5 ) + ' ' + plain . substring ( 5 , 9 ) + ' ' + plain . substring ( 9 , 13 ) + ' ' + plain . substring ( 13 , 17 ) ; default : // 0 . . 11 , 18 + digits long
// plain
return plain ; } // end switch |
public class EntityScanPackages { /** * Register the specified entity scan packages with the system .
* @ param registry the source registry
* @ param packageNames the package names to register */
public static void register ( BeanDefinitionRegistry registry , String ... packageNames ) { } } | Assert . notNull ( registry , "Registry must not be null" ) ; Assert . notNull ( packageNames , "PackageNames must not be null" ) ; register ( registry , Arrays . asList ( packageNames ) ) ; |
public class JMElasticsearchSearchAndCount { /** * Gets search request builder .
* @ param queryBuilder the query builder
* @ param indices the indices
* @ return the search request builder */
public SearchRequestBuilder getSearchRequestBuilder ( QueryBuilder queryBuilder , String ... indices ) { } } | return getSearchRequestBuilder ( indices , null , queryBuilder ) ; |
public class PackageManagerUtils { /** * Checks if the device has a input methods .
* @ param context the context .
* @ return { @ code true } if the device has a input methods . */
@ TargetApi ( Build . VERSION_CODES . JELLY_BEAN_MR2 ) public static boolean hasInputMethodsFeature ( Context context ) { } } | return hasInputMethodsFeature ( context . getPackageManager ( ) ) ; |
public class BootiqueSarlcMain { /** * Replies the options of the program .
* @ return the options of the program . */
public List < HelpOption > getOptions ( ) { } } | final BQRuntime runtime = createRuntime ( ) ; final ApplicationMetadata application = runtime . getInstance ( ApplicationMetadata . class ) ; final HelpOptions helpOptions = new HelpOptions ( ) ; application . getCommands ( ) . forEach ( c -> { helpOptions . add ( c . asOption ( ) ) ; c . getOptions ( ) . forEach ( o -> helpOptions . add ( o ) ) ; } ) ; application . getOptions ( ) . forEach ( o -> helpOptions . add ( o ) ) ; return helpOptions . getOptions ( ) ; |
public class Tuple2 { /** * Returns a new instance .
* @ param r
* first element
* @ param s
* second element
* @ param < R > type of first element
* @ param < S >
* type of second element
* @ return tuple */
public static < R , S > Tuple2 < R , S > create ( R r , S s ) { } } | return new Tuple2 < R , S > ( r , s ) ; |
public class ProcessContext { /** * Creates a new context using an RBAC access control model . < br >
* Users and permissions to execute transactions are randomly assigned
* to the given roles . < br >
* Each person is assigned to exactly one role .
* @ param activities The process activities .
* @ param originatorCount The number of desired originators .
* @ param roles The roles to use .
* @ return A new randomly generated Context . */
public static ProcessContext createRandomContext ( Set < String > activities , int originatorCount , List < String > roles ) { } } | Validate . notNull ( activities ) ; Validate . noNullElements ( activities ) ; Validate . notNegative ( originatorCount ) ; Validate . notNull ( roles ) ; Validate . noNullElements ( roles ) ; ProcessContext newContext = new ProcessContext ( "Random Context" ) ; newContext . setActivities ( activities ) ; List < String > cOriginators = createSubjectList ( originatorCount ) ; newContext . setSubjects ( new HashSet < > ( cOriginators ) ) ; // Create a new access control model .
newContext . setACModel ( RBACModel . createRandomModel ( cOriginators , activities , roles ) ) ; return newContext ; |
public class OpenGL3DParallelCoordinates { /** * Hack : Get / Create the style result .
* @ return Style result */
public StylingPolicy getStylePolicy ( ResultHierarchy hier , StyleLibrary stylelib ) { } } | Database db = ResultUtil . findDatabase ( hier ) ; AutomaticEvaluation . ensureClusteringResult ( db , db ) ; List < Clustering < ? extends Model > > clusterings = Clustering . getClusteringResults ( db ) ; if ( clusterings . isEmpty ( ) ) { throw new AbortException ( "No clustering result generated?!?" ) ; } return new ClusterStylingPolicy ( clusterings . get ( 0 ) , stylelib ) ; |
public class TransactionLogger { /** * Create new logging action
* This method check if there is an old instance for this thread - local
* If not - Initialize new instance and set it as this thread - local ' s instance
* @ param logger
* @ param auditor
* @ param instance
* @ return whether new instance was set to thread - local */
protected static boolean createLoggingAction ( final Logger logger , final Logger auditor , final TransactionLogger instance ) { } } | TransactionLogger oldInstance = getInstance ( ) ; if ( oldInstance == null || oldInstance . finished ) { if ( loggingKeys == null ) { synchronized ( TransactionLogger . class ) { if ( loggingKeys == null ) { logger . info ( "Initializing 'LoggingKeysHandler' class" ) ; loggingKeys = new LoggingKeysHandler ( keysPropStream ) ; } } } initInstance ( instance , logger , auditor ) ; setInstance ( instance ) ; return true ; } return false ; // Really not sure it can happen - since we arrive here in a new thread of transaction I think it ' s ThreadLocal should be empty . But leaving this code just in case . . . |
public class BasicActor { /** * public void pause ( boolean enabled ) { if ( enabled ) {
* characterControl . setEnabled ( ! enabled ) ;
* autonomousMovementControl . setEnabled ( ! enabled ) ;
* basicCharacterAnimControl . setEnabled ( ! enabled ) ;
* kinematicRagdollControl . setEnabled ( ! enabled ) ; } else { */
public boolean moveTo ( Vector3f location ) { } } | if ( autonomousMovementControl != null ) { ( ( AbstractControl ) autonomousMovementControl ) . setEnabled ( true ) ; return autonomousMovementControl . moveTo ( location ) ; } return false ; |
public class ConfigurationModule { /** * Binds a list to a specific optional / required Impl using ConfigurationModule .
* @ param opt Target optional / required Impl
* @ param implList List object to be injected
* @ param < T > a type
* @ return the configuration module */
public final < T > ConfigurationModule set ( final Impl < List > opt , final List implList ) { } } | final ConfigurationModule c = deepCopy ( ) ; c . processSet ( opt ) ; c . setImplLists . put ( opt , implList ) ; return c ; |
public class Delivery { /** * Show / Create a DialogFragment on the provided FragmentTransaction
* to be executed and shown .
* @ see android . app . DialogFragment # show ( android . app . FragmentTransaction , String )
* @ param transaction the fragment transaction used to show the dialog
* @ param tag the tag for the dialog fragment in the manager */
public void show ( FragmentTransaction transaction , String tag ) { } } | mActiveMail = generateDialogFragment ( ) ; mActiveMail . show ( transaction , tag ) ; |
public class PicoNetwork { /** * Start this VoltNetwork ' s thread . populate the verbotenThreads set
* with the id of the thread that is created */
public void start ( InputHandler ih , Set < Long > verbotenThreads ) { } } | m_ih = ih ; m_verbotenThreads = verbotenThreads ; startSetup ( ) ; m_thread . start ( ) ; |
public class StreamT { /** * / * ( non - Javadoc )
* @ see cyclops2 . monads . transformers . values . ListT # onEmpty ( java . lang . Object ) */
@ Override public StreamT < W , T > onEmpty ( final T value ) { } } | return ( StreamT < W , T > ) FoldableTransformerSeq . super . onEmpty ( value ) ; |
public class WrappedByteBuffer { /** * Puts a two - byte short into the buffer at the specified index .
* @ param index the index
* @ param v the two - byte short
* @ return the buffer */
public WrappedByteBuffer putShortAt ( int index , short v ) { } } | _checkForWriteAt ( index , 2 ) ; _buf . putShort ( index , v ) ; return this ; |
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcStackTerminalTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class ServerService { /** * Import server from ovf image
* @ param config server config
* @ return OperationFuture wrapper for ServerMetadata */
public OperationFuture < ServerMetadata > importServer ( ImportServerConfig config ) { } } | BaseServerResponse response = client . importServer ( serverConverter . buildImportServerRequest ( config , config . getCustomFields ( ) . isEmpty ( ) ? null : client . getCustomFields ( ) ) ) ; return postProcessBuildServerResponse ( response , config ) ; |
public class Layout { /** * Materializes the layout , under the specified parent .
* @ param parent Parent UI element at this level of the hierarchy . May be null .
* @ param node The current layout element .
* @ param ignoreInternal Ignore internal elements . */
private void materializeChildren ( ElementBase parent , LayoutElement node , boolean ignoreInternal ) { } } | for ( LayoutNode child : node . getChildren ( ) ) { PluginDefinition def = child . getDefinition ( ) ; ElementBase element = ignoreInternal && def . isInternal ( ) ? null : createElement ( parent , child ) ; if ( element != null ) { materializeChildren ( element , ( LayoutElement ) child , false ) ; } } for ( LayoutTrigger trigger : node . getTriggers ( ) ) { ElementTrigger trg = new ElementTrigger ( ) ; trg . addTarget ( ( ElementUI ) parent ) ; createElement ( trg , trigger . getChild ( LayoutTriggerCondition . class ) ) ; createElement ( trg , trigger . getChild ( LayoutTriggerAction . class ) ) ; ( ( ElementUI ) parent ) . addTrigger ( trg ) ; } |
public class ApiResource { /** * Similar to # request , but specific for use with collection types that
* come from the API ( i . e . lists of resources ) .
* < p > Collections need a little extra work because we need to plumb request
* options and params through so that we can iterate to the next page if
* necessary . */
public static < T extends StripeCollectionInterface < ? > > T requestCollection ( String url , Map < String , Object > params , Class < T > clazz , RequestOptions options ) throws StripeException { } } | T collection = request ( RequestMethod . GET , url , params , clazz , options ) ; if ( collection != null ) { collection . setRequestOptions ( options ) ; collection . setRequestParams ( params ) ; } return collection ; |
public class JobXMLDescriptorImpl { /** * Returns all < code > decision < / code > elements
* @ return list of < code > decision < / code > */
public List < Decision < JobXMLDescriptor > > getAllDecision ( ) { } } | List < Decision < JobXMLDescriptor > > list = new ArrayList < Decision < JobXMLDescriptor > > ( ) ; List < Node > nodeList = model . get ( "decision" ) ; for ( Node node : nodeList ) { Decision < JobXMLDescriptor > type = new DecisionImpl < JobXMLDescriptor > ( this , "decision" , model , node ) ; list . add ( type ) ; } return list ; |
public class StringUtil {

    /**
     * Given a string, returns the representation of that string as a Java
     * string literal, including the surrounding double quotes.
     *
     * <p>Quote, backslash and the common control characters are emitted as
     * their short escape sequences; NUL is emitted as {@code \000}; any other
     * character outside printable ASCII is emitted as a {@code \\uXXXX}
     * escape.
     *
     * @param value the string to quote (must not be null)
     * @return a Java source literal that evaluates to {@code value}
     */
    public static String javaQuotedLiteral(String value) {
        StringBuilder b = new StringBuilder(value.length() * 2);
        b.append('"');
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            switch (c) {
                case '"':
                    b.append("\\\"");
                    break;
                case '\\':
                    b.append("\\\\");
                    break;
                case '\n':
                    b.append("\\n");
                    break;
                case '\r':
                    // BUG FIX: the '\r' and '\t' cases previously emitted each
                    // other's escape sequence ('\r' -> "\t", '\t' -> "\r").
                    b.append("\\r");
                    break;
                case '\t':
                    b.append("\\t");
                    break;
                case '\0':
                    // Three octal digits so a following digit cannot extend
                    // the escape ("\0" + "7" must not become "\07").
                    b.append("\\000");
                    break;
                default:
                    if (c >= 0x20 && c <= 0x7e) {
                        b.append(c);
                    } else {
                        // Non-printable / non-ASCII: 4-digit unicode escape.
                        b.append(String.format("\\u%04x", (int) c));
                    }
                    break;
            }
        }
        b.append('"');
        return b.toString();
    }
}
public class ObjectByteOffsetImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public boolean eIsSet ( int featureID ) { } } | switch ( featureID ) { case AfplibPackage . OBJECT_BYTE_OFFSET__DIR_BY_OFF : return DIR_BY_OFF_EDEFAULT == null ? dirByOff != null : ! DIR_BY_OFF_EDEFAULT . equals ( dirByOff ) ; case AfplibPackage . OBJECT_BYTE_OFFSET__DIR_BY_HI : return DIR_BY_HI_EDEFAULT == null ? dirByHi != null : ! DIR_BY_HI_EDEFAULT . equals ( dirByHi ) ; } return super . eIsSet ( featureID ) ; |
public class XmlLineNumberParser { /** * Parses the XML .
* @ param is the XML content as an input stream
* @ param rootNames one or more root names that is used as baseline for beginning the parsing , for example camelContext to start parsing
* when Camel is discovered . Multiple names can be defined separated by comma
* @ param forceNamespace an optional namespace to force assign to each node . This may be needed for JAXB unmarshalling from XML - > POJO .
* @ return the DOM model
* @ throws Exception is thrown if error parsing */
public static Document parseXml ( final InputStream is , final String rootNames , final String forceNamespace ) throws Exception { } } | final Document doc ; SAXParser parser ; final SAXParserFactory factory = SAXParserFactory . newInstance ( ) ; parser = factory . newSAXParser ( ) ; final DocumentBuilderFactory dbf = DocumentBuilderFactory . newInstance ( ) ; // turn off validator and loading external dtd
dbf . setValidating ( false ) ; dbf . setNamespaceAware ( true ) ; dbf . setFeature ( "http://xml.org/sax/features/namespaces" , false ) ; dbf . setFeature ( "http://xml.org/sax/features/validation" , false ) ; dbf . setFeature ( "http://apache.org/xml/features/nonvalidating/load-dtd-grammar" , false ) ; dbf . setFeature ( "http://apache.org/xml/features/nonvalidating/load-external-dtd" , false ) ; dbf . setFeature ( "http://xml.org/sax/features/external-parameter-entities" , false ) ; dbf . setFeature ( "http://xml.org/sax/features/external-general-entities" , false ) ; final DocumentBuilder docBuilder = dbf . newDocumentBuilder ( ) ; doc = docBuilder . newDocument ( ) ; final Stack < Element > elementStack = new Stack < Element > ( ) ; final StringBuilder textBuffer = new StringBuilder ( ) ; final DefaultHandler handler = new DefaultHandler ( ) { private Locator locator ; private boolean found ; @ Override public void setDocumentLocator ( final Locator locator ) { this . locator = locator ; // Save the locator , so that it can be used later for line tracking when traversing nodes .
this . found = rootNames == null ; } private boolean isRootName ( String qName ) { for ( String root : rootNames . split ( "," ) ) { if ( qName . equals ( root ) ) { return true ; } } return false ; } @ Override public void startElement ( final String uri , final String localName , final String qName , final Attributes attributes ) throws SAXException { addTextIfNeeded ( ) ; if ( rootNames != null && ! found ) { if ( isRootName ( qName ) ) { found = true ; } } if ( found ) { Element el ; if ( forceNamespace != null ) { el = doc . createElementNS ( forceNamespace , qName ) ; } else { el = doc . createElement ( qName ) ; } for ( int i = 0 ; i < attributes . getLength ( ) ; i ++ ) { el . setAttribute ( attributes . getQName ( i ) , attributes . getValue ( i ) ) ; } el . setUserData ( LINE_NUMBER , String . valueOf ( this . locator . getLineNumber ( ) ) , null ) ; el . setUserData ( COLUMN_NUMBER , String . valueOf ( this . locator . getColumnNumber ( ) ) , null ) ; elementStack . push ( el ) ; } } @ Override public void endElement ( final String uri , final String localName , final String qName ) { if ( ! found ) { return ; } addTextIfNeeded ( ) ; final Element closedEl = elementStack . isEmpty ( ) ? null : elementStack . pop ( ) ; if ( closedEl != null ) { if ( elementStack . isEmpty ( ) ) { // Is this the root element ?
doc . appendChild ( closedEl ) ; } else { final Element parentEl = elementStack . peek ( ) ; parentEl . appendChild ( closedEl ) ; } closedEl . setUserData ( LINE_NUMBER_END , String . valueOf ( this . locator . getLineNumber ( ) ) , null ) ; closedEl . setUserData ( COLUMN_NUMBER_END , String . valueOf ( this . locator . getColumnNumber ( ) ) , null ) ; } } @ Override public void characters ( final char ch [ ] , final int start , final int length ) throws SAXException { textBuffer . append ( ch , start , length ) ; } @ Override public InputSource resolveEntity ( String publicId , String systemId ) throws IOException , SAXException { // do not resolve external dtd
return new InputSource ( new StringReader ( "" ) ) ; } // Outputs text accumulated under the current node
private void addTextIfNeeded ( ) { if ( textBuffer . length ( ) > 0 ) { final Element el = elementStack . isEmpty ( ) ? null : elementStack . peek ( ) ; if ( el != null ) { final Node textNode = doc . createTextNode ( textBuffer . toString ( ) ) ; el . appendChild ( textNode ) ; textBuffer . delete ( 0 , textBuffer . length ( ) ) ; } } } } ; parser . parse ( is , handler ) ; return doc ; |
public class AbstractClient { /** * { @ inheritDoc } */
@ Override public void delayedEnqueue ( final String queue , final Job job , final long future ) { } } | validateArguments ( queue , job , future ) ; try { doDelayedEnqueue ( queue , ObjectMapperFactory . get ( ) . writeValueAsString ( job ) , future ) ; } catch ( RuntimeException re ) { throw re ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } |
public class UserMessages { public void add ( String property , UserMessage message ) { } } | assertArgumentNotNull ( "property" , property ) ; assertArgumentNotNull ( "message" , message ) ; assertLocked ( ) ; final UserMessageItem item = getPropertyItem ( property ) ; final List < UserMessage > messageList ; if ( item == null ) { ++ itemCount ; messageList = new ArrayList < UserMessage > ( ) ; final String filtered = filterProperty ( property ) ; messageMap . put ( filtered , newUserMessageItem ( messageList , itemCount , filtered ) ) ; } else { messageList = item . getMessageList ( ) ; } messageList . add ( message ) ; |
public class ModifyEndpointRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ModifyEndpointRequest modifyEndpointRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( modifyEndpointRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( modifyEndpointRequest . getEndpointArn ( ) , ENDPOINTARN_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getEndpointIdentifier ( ) , ENDPOINTIDENTIFIER_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getEndpointType ( ) , ENDPOINTTYPE_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getEngineName ( ) , ENGINENAME_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getUsername ( ) , USERNAME_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getPassword ( ) , PASSWORD_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getServerName ( ) , SERVERNAME_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getPort ( ) , PORT_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getDatabaseName ( ) , DATABASENAME_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getExtraConnectionAttributes ( ) , EXTRACONNECTIONATTRIBUTES_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getCertificateArn ( ) , CERTIFICATEARN_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getSslMode ( ) , SSLMODE_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getServiceAccessRoleArn ( ) , SERVICEACCESSROLEARN_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getExternalTableDefinition ( ) , EXTERNALTABLEDEFINITION_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getDynamoDbSettings ( ) , DYNAMODBSETTINGS_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getS3Settings ( ) , S3SETTINGS_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . 
getDmsTransferSettings ( ) , DMSTRANSFERSETTINGS_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getMongoDbSettings ( ) , MONGODBSETTINGS_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getKinesisSettings ( ) , KINESISSETTINGS_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getElasticsearchSettings ( ) , ELASTICSEARCHSETTINGS_BINDING ) ; protocolMarshaller . marshall ( modifyEndpointRequest . getRedshiftSettings ( ) , REDSHIFTSETTINGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class RESTWebService { /** * Parse sentence .
* @ param sentence sentence .
* @ param request http servlet request .
* @ return parse tree JSON . */
@ RequestMapping ( produces = MediaType . APPLICATION_JSON_VALUE , value = "/parse" , method = RequestMethod . GET ) public String parse ( @ RequestParam ( "sentence" ) String sentence , HttpServletRequest request ) { } } | if ( sentence == null || sentence . trim ( ) . isEmpty ( ) ) { return StringUtils . EMPTY ; } sentence = sentence . trim ( ) ; LOGGER . info ( "Parse [" + sentence + "]" ) ; DTree tree = PARSER . parse ( sentence ) ; DTreeEntity parseTreeEntity = new DTreeEntity ( tree , "SAMPLE_AUTHOR" ) ; dNodeEntityRepository . save ( parseTreeEntity . dNodeEntities ) ; dTreeEntityRepository . save ( parseTreeEntity ) ; return "[" + toJSON ( tree . get ( 0 ) ) + "]" ; |
public class AppBndAuthorizationTableService { /** * Update the map for the specified group name . If the accessID is
* successfully computed , the map will be updated with the accessID .
* If the accessID can not be computed due to the user not being found ,
* INVALID _ ACCESS _ ID will be stored .
* @ param maps
* @ param group
* @ param groupNameFromRole
* @ return */
private String updateMissingGroupAccessId ( AuthzTableContainer maps , Group group , String groupNameFromRole ) { } } | String accessIdFromRole ; accessIdFromRole = getMissingAccessId ( group ) ; if ( accessIdFromRole != null ) { maps . groupToAccessIdMap . put ( groupNameFromRole , accessIdFromRole ) ; } else { // Unable to compute the accessId , store an invalid access ID indicate this
// and avoid future attempts
maps . groupToAccessIdMap . put ( groupNameFromRole , INVALID_ACCESS_ID ) ; } return accessIdFromRole ; |
public class WorkflowManagerAbstract { /** * / * ( non - Javadoc )
* @ see nz . co . senanque . workflow . WorkflowManager # findHandlerTasks ( nz . co . senanque . workflow . instances . ProcessInstance ) */
@ Transactional public List < Audit > findHandlerTasks ( ProcessInstance processInstance ) { } } | Stack < Audit > ret = new Stack < Audit > ( ) ; for ( Audit audit : processInstance . getAudits ( ) ) { if ( audit . isHandler ( ) && audit . getStatus ( ) != null ) { ret . push ( audit ) ; } } return ret ; |
public class CmsGalleryControllerHandler { /** * Deletes the html content of the types parameter and removes the style . < p >
* @ param types the types to be removed from selection */
public void onClearTypes ( List < String > types ) { } } | if ( types != null ) { m_galleryDialog . getTypesTab ( ) . uncheckTypes ( types ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.