signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PkRSS {
    /**
     * Handles the specified {@link Request}. May throw an {@link IOException} for
     * mishandled URLs or timeouts.
     * @param request Request to execute.
     * @throws IOException if downloading or parsing the feed fails.
     */
    protected void load(final Request request) throws IOException {
        log("load(" + request + ')');
        // Fall back to the instance-level handler / safe flag when the request does not override them.
        final CallbackHandler handler = request.handler != null ? request.handler : this.handler;
        final boolean safe = request.safe != null ? request.safe : this.safe;
        // The callback is held weakly on the request; get() may return null if it was collected.
        Callback callback = request.callback != null ? request.callback.get() : null;
        // Don't load if URL is the favorites key
        if (request.url.equals(KEY_FAVORITES)) {
            log("Favorites URL detected, skipping load...");
            return;
        }
        // Notify callback before any network work starts
        handler.onPreload(safe, callback);
        // Create safe url for pagination / indexing purposes; a request-specific
        // downloader takes precedence over the shared instance downloader.
        String safeUrl = request.downloader == null
                ? downloader.toSafeUrl(request)
                : request.downloader.toSafeUrl(request);
        // Put the page index into the request's HashMap
        pageTracker.put(safeUrl, request.page);
        // Get response from this request
        String response = request.downloader == null
                ? downloader.execute(request)
                : request.downloader.execute(request);
        // Parse articles from response and insert into global list
        List<Article> newArticles = request.parser == null
                ? parser.parse(response)
                : request.parser.parse(response);
        insert(safeUrl, newArticles);
        // Notify callback with the freshly parsed articles
        handler.onLoaded(safe, callback, newArticles);
    }
}
public class UrlUtils { /** * Download data from an URL and return the raw bytes . * @ param sUrl The full URL used to download the content . * @ param timeout The timeout in milliseconds that is used for both * connection timeout and read timeout . * @ param sslFactory The SSLFactory to use for the connection , this allows to support custom SSL certificates * @ return The resulting data , e . g . a HTML string as byte array . * @ throws IOException If accessing the resource fails . */ public static byte [ ] retrieveRawData ( String sUrl , int timeout , SSLSocketFactory sslFactory ) throws IOException { } }
URL url = new URL ( sUrl ) ; LOGGER . fine ( "Using the following URL for retrieving the data: " + url . toString ( ) ) ; HttpURLConnection conn = ( HttpURLConnection ) url . openConnection ( ) ; // set specified timeout if non - zero if ( timeout != 0 ) { conn . setConnectTimeout ( timeout ) ; conn . setReadTimeout ( timeout ) ; } try { conn . setDoOutput ( false ) ; conn . setDoInput ( true ) ; if ( conn instanceof HttpsURLConnection && sslFactory != null ) { ( ( HttpsURLConnection ) conn ) . setSSLSocketFactory ( sslFactory ) ; } conn . connect ( ) ; int code = conn . getResponseCode ( ) ; if ( code != HttpURLConnection . HTTP_OK && code != HttpURLConnection . HTTP_CREATED && code != HttpURLConnection . HTTP_ACCEPTED ) { String msg = "Error " + code + " returned while retrieving response for url '" + url + "' message from client: " + conn . getResponseMessage ( ) ; LOGGER . warning ( msg ) ; throw new IOException ( msg ) ; } try ( InputStream strm = conn . getInputStream ( ) ) { return IOUtils . toByteArray ( strm ) ; } // actually read the contents , even if we are not using it to simulate a full download of the data /* ByteArrayOutputStream memStream = new ByteArrayOutputStream ( conn . getContentLength ( ) = = - 1 ? 40000 : conn . getContentLength ( ) ) ; try { byte b [ ] = new byte [ 4096 ] ; int len ; while ( ( len = strm . read ( b ) ) > 0 ) { memStream . write ( b , 0 , len ) ; } finally { memStream . close ( ) ; if ( LOGGER . isLoggable ( Level . FINE ) ) { LOGGER . fine ( " Received data , size : " + memStream . size ( ) + " ( " + conn . getContentLength ( ) + " ) first bytes : " + replaceInvalidChar ( memStream . toString ( ) . substring ( 0 , Math . min ( memStream . size ( ) , REPORT _ PEEK _ COUNT ) ) ) ) ; return memStream . toByteArray ( ) ; */ } finally { conn . disconnect ( ) ; }
public class OtpInputStream { /** * Read an integer from the stream . * @ return the integer value . * @ exception OtpErlangDecodeException * if the next term in the stream cannot be represented as * an integer . */ public int read_int ( ) throws OtpErlangDecodeException { } }
final long l = this . read_long ( false ) ; final int i = ( int ) l ; if ( l != i ) { throw new OtpErlangDecodeException ( "Value does not fit in int: " + l ) ; } return i ;
public class HiCS {
    /**
     * Calculates the actual contrast of a given subspace via Monte-Carlo sampling:
     * in each of m iterations one dimension of the subspace is chosen at random,
     * a conditional sample is built by intersecting random index windows of the
     * remaining dimensions, and the deviation between the full and conditional
     * one-dimensional distributions is accumulated.
     * @param relation Relation to process
     * @param subspace Subspace
     * @param subspaceIndex Subspace indexes (per-dimension sorted object ids)
     * @param random random generator driving the Monte-Carlo sampling
     */
    private void calculateContrast(Relation<? extends NumberVector> relation, HiCSSubspace subspace, ArrayList<ArrayDBIDs> subspaceIndex, Random random) {
        final int card = subspace.cardinality();
        final double alpha1 = FastMath.pow(alpha, (1.0 / card));
        // windowsize = expected size of each per-dimension slice
        final int windowsize = (int) (relation.size() * alpha1);
        final FiniteProgress prog = LOG.isDebugging() ? new FiniteProgress("Monte-Carlo iterations", m, LOG) : null;
        int retries = 0;
        double deviationSum = 0.0;
        for (int i = 0; i < m; i++) {
            // Choose a random set bit (the dimension whose distribution we test).
            int chosen = -1;
            for (int tmp = random.nextInt(card); tmp >= 0; tmp--) {
                chosen = subspace.nextSetBit(chosen + 1);
            }
            // initialize sample with all objects; it shrinks via intersections below
            DBIDs conditionalSample = relation.getDBIDs();
            for (int j = subspace.nextSetBit(0); j >= 0; j = subspace.nextSetBit(j + 1)) {
                if (j == chosen) {
                    continue;
                }
                ArrayDBIDs sortedIndices = subspaceIndex.get(j);
                ArrayModifiableDBIDs indexBlock = DBIDUtil.newArray(windowsize);
                // initialize index block: a random contiguous window of this dimension's sorted ids
                DBIDArrayIter iter = sortedIndices.iter();
                iter.seek(random.nextInt(relation.size() - windowsize));
                for (int k = 0; k < windowsize; k++, iter.advance()) {
                    indexBlock.add(iter); // select index block
                }
                conditionalSample = DBIDUtil.intersection(conditionalSample, indexBlock);
            }
            if (conditionalSample.size() < 10) {
                // Sample too small to be meaningful: redo this iteration (up to MAX_RETRIES).
                retries++;
                if (LOG.isDebugging()) {
                    LOG.debug("Sample size very small. Retry no. " + retries);
                }
                if (retries >= MAX_RETRIES) {
                    LOG.warning("Too many retries, for small samples: " + retries);
                } else {
                    i--;
                    continue;
                }
            }
            // Project conditional set onto the chosen dimension
            double[] sampleValues = new double[conditionalSample.size()];
            {
                int l = 0;
                for (DBIDIter iter = conditionalSample.iter(); iter.valid(); iter.advance()) {
                    sampleValues[l++] = relation.get(iter).doubleValue(chosen);
                }
            }
            // Project full set onto the chosen dimension
            double[] fullValues = new double[relation.size()];
            {
                int l = 0;
                for (DBIDIter iter = subspaceIndex.get(chosen).iter(); iter.valid(); iter.advance()) {
                    fullValues[l++] = relation.get(iter).doubleValue(chosen);
                }
            }
            double contrast = statTest.deviation(fullValues, sampleValues);
            if (Double.isNaN(contrast)) {
                // Discard degenerate tests and repeat the iteration.
                i--;
                LOG.warning("Contrast was NaN");
                continue;
            }
            deviationSum += contrast;
            LOG.incrementProcessed(prog);
        }
        LOG.ensureCompleted(prog);
        // Average deviation over all m iterations is the subspace contrast.
        subspace.contrast = deviationSum / m;
    }
}
public class ZoneExtension { /** * Add server event handlers */ protected void addServerEventHandlers ( ) { } }
Map < Object , Class < ? > > handlers = getServerEventHandlers ( ) ; Set < Entry < Object , Class < ? > > > entries = handlers . entrySet ( ) ; for ( Entry < Object , Class < ? > > entry : entries ) { SFSEventType type = SFSEventType . valueOf ( entry . getKey ( ) . toString ( ) ) ; ServerEventHandler handler = createServerEventHandler ( type , entry . getValue ( ) ) ; addEventHandler ( type , handler ) ; }
public class AbstractEXIBodyEncoder { /** * Event - Codes */ protected void encode1stLevelEventCode ( int pos ) throws IOException { } }
int codeLength = fidelityOptions . get1stLevelEventCodeLength ( getCurrentGrammar ( ) ) ; if ( codeLength > 0 ) { channel . encodeNBitUnsignedInteger ( pos , codeLength ) ; }
public class LogRepositoryListener { /** * ( non - Javadoc ) * @ see org . eclipse . aether . util . listener . AbstractRepositoryListener # artifactDeployed * ( org . eclipse . aether . RepositoryEvent ) */ @ Override public void artifactDeployed ( RepositoryEvent event ) { } }
log . fine ( "Deployed " + event . getArtifact ( ) + " to " + event . getRepository ( ) ) ;
public class TileSetColorImpl {
    /**
     * Sets the CVAL2 attribute and, when adapters are attached, emits an EMF
     * SET notification carrying the old and new values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param newCVAL2 the new value of the CVAL2 attribute; may be null
     * @generated
     */
    public void setCVAL2(Integer newCVAL2) {
        Integer oldCVAL2 = cval2;
        cval2 = newCVAL2;
        // Only build the notification object when someone is actually listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.TILE_SET_COLOR__CVAL2, oldCVAL2, cval2));
    }
}
public class ScribbleShrinkwrapHelper { /** * Creates an enterprise archive for the module in whose working directory the method is invoked , including all * compile - scoped jar archives . * @ return an enterprise containgin all dependency of the current module */ public static EnterpriseArchive createEARDeployment ( ) { } }
final EnterpriseArchive ear = ShrinkWrap . create ( EnterpriseArchive . class , "scribble-test.ear" ) ; // use local maven repository mirror only final PomEquippedResolveStage pom = Maven . configureResolver ( ) . workOffline ( ) . loadPomFromFile ( "pom.xml" ) ; final File [ ] files = pom . importDependencies ( ScopeType . COMPILE ) . resolve ( ) . withTransitivity ( ) . asFile ( ) ; for ( final File f : files ) { if ( f . getName ( ) . endsWith ( ".jar" ) ) { LOG . debug ( "Adding lib {}" , f ) ; ear . addAsLibrary ( f ) ; } } return ear ;
public class GreenPepperXmlRpcServer { /** * { @ inheritDoc } */ public String removeSpecification ( Vector < Object > specificationParams ) { } }
try { Specification specification = XmlRpcDataMarshaller . toSpecification ( specificationParams ) ; service . removeSpecification ( specification ) ; log . debug ( "Removed specification: " + specification . getName ( ) ) ; return SUCCESS ; } catch ( Exception e ) { return errorAsString ( e , SPECIFICATION_REMOVE_FAILED ) ; }
public class StringUtility {
    /**
     * Counts how many times a word appears in a line. Case insensitive matching.
     * Thin delegate to the correctly spelled {@code countOccurrences}.
     * @param buff buffer containing the line to scan
     * @param len number of valid bytes in the buffer
     * @param word the word to count
     * @return the number of occurrences found
     * @deprecated Corrected spelling; use {@code countOccurrences} instead.
     */
    @Deprecated
    public static int countOccurances(byte[] buff, int len, String word) {
        return countOccurrences(buff, len, word);
    }
}
public class DateIntervalFormat {
    /**
     * Set the TimeZone for the calendar used by this DateIntervalFormat object.
     * @param zone The new TimeZone, will be cloned for use by this DateIntervalFormat.
     */
    public void setTimeZone(TimeZone zone) {
        // A single defensive clone is shared by all three consumers below.
        final TimeZone cloned = (TimeZone) zone.clone();
        if (fDateFormat != null) {
            fDateFormat.setTimeZone(cloned);
        }
        // fDateFormat has the master calendar for the DateIntervalFormat;
        // fFromCalendar and fToCalendar are internal work clones of that calendar.
        if (fFromCalendar != null) {
            fFromCalendar.setTimeZone(cloned);
        }
        if (fToCalendar != null) {
            fToCalendar.setTimeZone(cloned);
        }
    }
}
public class ApiOvhMe { /** * Get this object properties * REST : GET / me / order / { orderId } / debt * @ param orderId [ required ] */ public OvhDebt order_orderId_debt_GET ( Long orderId ) throws IOException { } }
String qPath = "/me/order/{orderId}/debt" ; StringBuilder sb = path ( qPath , orderId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhDebt . class ) ;
public class LevenbergMarquardt {
    /**
     * A simple test: fits a two-parameter linear model to two target points,
     * then clones the optimizer with modified target values and re-runs it.
     * @param args unused
     * @throws SolverException if the solver fails
     * @throws CloneNotSupportedException if the optimizer cannot be cloned
     */
    public static void main(String[] args) throws SolverException, CloneNotSupportedException {
        LevenbergMarquardt optimizer = new LevenbergMarquardt() {
            private static final long serialVersionUID = -282626938650139518L;

            // Override your objective function here
            @Override
            public void setValues(double[] parameters, double[] values) {
                values[0] = parameters[0] * 0.0 + parameters[1];
                values[1] = parameters[0] * 2.0 + parameters[1];
            }
        };
        // Set solver parameters
        optimizer.setInitialParameters(new double[] { 0, 0 });
        optimizer.setWeights(new double[] { 1, 1 });
        optimizer.setMaxIteration(100);
        optimizer.setTargetValues(new double[] { 5, 10 });
        optimizer.run();
        double[] bestParameters = optimizer.getBestFitParameters();
        System.out.println("The solver for problem 1 required " + optimizer.getIterations() + " iterations. The best fit parameters are:");
        for (int i = 0; i < bestParameters.length; i++) {
            System.out.println("\tparameter[" + i + "]: " + bestParameters[i]);
        }
        /*
         * Creating a clone, continuing the search with new target values.
         * Note that we do not re-define the setValues method.
         */
        Optimizer optimizer2 = optimizer.getCloneWithModifiedTargetValues(new double[] { 5.1, 10.2 }, new double[] { 1, 1 }, true);
        optimizer2.run();
        double[] bestParameters2 = optimizer2.getBestFitParameters();
        System.out.println("The solver for problem 2 required " + optimizer2.getIterations() + " iterations. The best fit parameters are:");
        for (int i = 0; i < bestParameters2.length; i++) {
            System.out.println("\tparameter[" + i + "]: " + bestParameters2[i]);
        }
    }
}
public class IfcStructuralLoadGroupImpl {
    /**
     * Returns the result groups that reference this load group as their source,
     * resolved lazily through the EMF reflective API.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    public EList<IfcStructuralResultGroup> getSourceOfResultGroup() {
        // The boolean 'true' requests proxy resolution on access.
        return (EList<IfcStructuralResultGroup>) eGet(Ifc2x3tc1Package.Literals.IFC_STRUCTURAL_LOAD_GROUP__SOURCE_OF_RESULT_GROUP, true);
    }
}
public class FastByteArrayOutputStream { /** * Writes < code > len < / code > bytes from the specified byte array * starting at offset < code > off < / code > to this stream . * @ param bytes byte [ ] the data to write to this stream . * @ param offset the start offset in the data . * @ param len the number of bytes to write . */ public void write ( byte [ ] bytes , int offset , int len ) { } }
if ( bytes == null ) { return ; } if ( ( offset < 0 ) || ( offset > bytes . length ) || ( len < 0 ) || ( ( offset + len ) - bytes . length > 0 ) ) { throw new IndexOutOfBoundsException ( "offset=" + offset + ", len=" + len + ", bytes.length=" + bytes . length ) ; } ensureCapacity ( size + len ) ; System . arraycopy ( bytes , offset , buffer , size , len ) ; size += len ;
public class ORecordLazySet {
    /**
     * Adds the item in the underlying List preserving the order of the collection.
     * @param e the identifiable record to add
     * @return true if the element was added, false if it was already present
     */
    public boolean add(final OIdentifiable e) {
        if (e.getIdentity().isNew()) {
            final ORecord<?> record = e.getRecord();
            // ADD IN TEMP LIST: records without a persistent identity are kept aside
            if (newItems == null)
                newItems = new IdentityHashMap<ORecord<?>, Object>();
            else if (newItems.containsKey(record))
                return false;
            newItems.put(record, NEWMAP_VALUE);
            setDirty();
            return true;
        } else if (OGlobalConfiguration.LAZYSET_WORK_ON_STREAM.getValueAsBoolean() && getStreamedContent() != null) {
            // FAST INSERT: append the rid directly to the serialized stream buffer
            final String ridString = e.getIdentity().toString();
            final StringBuilder buffer = getStreamedContent();
            if (buffer.indexOf(ridString) < 0) {
                if (buffer.length() > 0)
                    buffer.append(',');
                e.getIdentity().toString(buffer);
                setDirty();
                return true;
            }
            return false;
        } else {
            final int pos = indexOf(e);
            if (pos < 0) {
                // NOT FOUND: a negative pos encodes the insertion point as -(pos) - 1,
                // so inserting there keeps the delegate list ordered.
                delegate.add(pos * -1 - 1, e);
                return true;
            }
            return false;
        }
    }
}
public class ThreadPoolStage {
    /**
     * Closes resources: shuts the executor down, waits up to SHUTDOWN_TIMEOUT ms
     * for termination, and force-cancels any remaining tasks if it does not finish.
     * The compareAndSet on the closed flag makes the shutdown run at most once.
     */
    @Override
    public void close() {
        if (closed.compareAndSet(false, true) && numThreads > 0) {
            LOG.log(Level.FINEST, "Closing ThreadPoolStage {0}: begin", this.name);
            executor.shutdown();
            boolean isTerminated = false;
            try {
                isTerminated = executor.awaitTermination(SHUTDOWN_TIMEOUT, TimeUnit.MILLISECONDS);
            } catch (final InterruptedException ex) {
                LOG.log(Level.WARNING, "Interrupted closing ThreadPoolStage " + this.name, ex);
            }
            if (!isTerminated) {
                // Graceful shutdown timed out: cancel queued work and report how much was dropped.
                final List<Runnable> droppedRunnables = executor.shutdownNow();
                LOG.log(Level.SEVERE, "Closing ThreadPoolStage {0}: Executor did not terminate in {1} ms. Dropping {2} tasks", new Object[] { this.name, SHUTDOWN_TIMEOUT, droppedRunnables.size() });
            }
            if (!executor.isTerminated()) {
                LOG.log(Level.SEVERE, "Closing ThreadPoolStage {0}: Executor failed to terminate.", this.name);
            }
            LOG.log(Level.FINEST, "Closing ThreadPoolStage {0}: end", this.name);
        }
    }
}
public class PipeDataElement { public short [ ] extractUCharArray ( ) { } }
final byte [ ] argOut = element . value . uchar_att_value ( ) ; final short [ ] val = new short [ argOut . length ] ; final short mask = 0xFF ; for ( int i = 0 ; i < argOut . length ; i ++ ) { val [ i ] = ( short ) ( mask & argOut [ i ] ) ; } return val ;
public class HudsonPrivateSecurityRealm { /** * All users who can login to the system . */ public List < User > getAllUsers ( ) { } }
List < User > r = new ArrayList < User > ( ) ; for ( User u : User . getAll ( ) ) { if ( u . getProperty ( Details . class ) != null ) r . add ( u ) ; } Collections . sort ( r ) ; return r ;
public class Reflections { /** * 通过反射 , 获得 Class 定义中声明的父类的泛型参数的类型 。 * 如无法找到 , 返回 Object . class 。 * < pre > * public UserDao extends HibernateDao & lt ; User , Long & gt ; * < / pre > * @ param clazz * clazz The class to introspect * @ param index * the Index of the generic ddeclaration , start from 0. * @ return the index generic declaration , or Object . class if cannot be determined */ public static Class getClassGenricType ( final Class clazz , final int index ) { } }
Type genType = clazz . getGenericSuperclass ( ) ; if ( ! ( genType instanceof ParameterizedType ) ) { LOG . warn ( "{}'s superclass not ParameterizedType" , clazz . getSimpleName ( ) ) ; return Object . class ; } Type [ ] params = ( ( ParameterizedType ) genType ) . getActualTypeArguments ( ) ; if ( index >= params . length || index < 0 ) { LOG . warn ( "Index: {}, Size of {}'s Parameterized Type: {}" , index , clazz . getSimpleName ( ) , params . length ) ; return Object . class ; } if ( ! ( params [ index ] instanceof Class ) ) { LOG . warn ( "{} not set the actual class on superclass generic parameter" , clazz . getSimpleName ( ) ) ; return Object . class ; } return ( Class ) params [ index ] ;
public class UserPasswordHandler {
    /**
     * Called when a change in the record status is about to happen / has happened.
     * On add/update it validates a password change: optionally verifies the current
     * password, and requires the two new-password fields to match before copying
     * the new value into the user record.
     * @param field If this field change is due to a field, this is the field.
     * @param iChangeType The type of change that occurred.
     * @param bDisplayOption If true, display any changes.
     * @return an error code.
     * ADD_TYPE - Before a write.
     * UPDATE_TYPE - Before an update.
     * DELETE_TYPE - Before a delete.
     * AFTER_UPDATE_TYPE - After a write or update.
     * LOCK_TYPE - Before a lock.
     * SELECT_TYPE - After a select.
     * DESELECT_TYPE - After a deselect.
     * MOVE_NEXT_TYPE - After a move.
     * AFTER_REQUERY_TYPE - Record opened.
     * SELECT_EOF_TYPE - EOF Hit.
     */
    public int doRecordChange(FieldInfo field, int iChangeType, boolean bDisplayOption) {
        if ((iChangeType == DBConstants.ADD_TYPE) || (iChangeType == DBConstants.UPDATE_TYPE)) {
            Record recUserInfo = this.getOwner();
            RecordOwner recordOwner = recUserInfo.getRecordOwner();
            Record recUserScreenRecord = (Record) recordOwner.getScreenRecord();
            Task task = recordOwner.getTask();
            if (m_bCheckOldPassword) {
                // This screen can only change existing accounts, never create them.
                if (iChangeType == DBConstants.ADD_TYPE)
                    return task.setLastError(task.getString("Can't add a new account on this screen."));
                // The stored password must match what the user entered as the current password.
                if (!recUserInfo.getField(UserInfo.PASSWORD).equals(recUserScreenRecord.getField(UserScreenRecord.CURRENT_PASSWORD))) {
                    return task.setLastError(task.getString("Error, current password was incorrect."));
                }
            }
            if ((recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_1).isModified())
                    || (recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_2).isModified())) {
                // Both confirmation fields must agree before the new password is accepted.
                if (recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_1).equals(recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_2))) {
                    recUserInfo.getField(UserInfo.PASSWORD).moveFieldToThis(recUserScreenRecord.getField(UserScreenRecord.NEW_PASSWORD_1));
                } else {
                    return task.setLastError(task.getString("Error, new passwords are not equal."));
                }
            }
        }
        return super.doRecordChange(field, iChangeType, bDisplayOption);
    }
}
public class SuiteRunner { /** * { @ inheritDoc } */ public void run ( String source , String destination ) { } }
List < String > names = listDocumentsAt ( source ) ; if ( names . isEmpty ( ) ) { monitor . testRunning ( source ) ; monitor . testDone ( 0 , 0 , 0 , 0 ) ; return ; } for ( String name : names ) { runner . run ( name , flatten ( name ) ) ; }
public class OntopRepositoryConfig { /** * Checks that the fields are not missing , and that files exist and are accessible . */ private void validateFields ( ) throws RepositoryConfigException { } }
try { /* * Ontology file */ if ( owlFile . filter ( f -> ! f . exists ( ) ) . isPresent ( ) ) { throw new RepositoryConfigException ( String . format ( "The OWL file %s does not exist!" , owlFile . get ( ) . getAbsolutePath ( ) ) ) ; } if ( owlFile . filter ( f -> ! f . canRead ( ) ) . isPresent ( ) ) { throw new RepositoryConfigException ( String . format ( "The OWL file %s is not accessible!" , owlFile . get ( ) . getAbsolutePath ( ) ) ) ; } /* * Mapping file */ if ( obdaFile == null ) { throw new RepositoryConfigException ( String . format ( "No mapping file specified for repository creation " ) ) ; } if ( ! obdaFile . exists ( ) ) { throw new RepositoryConfigException ( String . format ( "The mapping file %s does not exist!" , obdaFile . getAbsolutePath ( ) ) ) ; } if ( ! obdaFile . canRead ( ) ) { throw new RepositoryConfigException ( String . format ( "The mapping file %s is not accessible!" , obdaFile . getAbsolutePath ( ) ) ) ; } /* * Properties file */ if ( propertiesFile == null ) { throw new RepositoryConfigException ( String . format ( "No properties file specified for repository creation " ) ) ; } if ( ! propertiesFile . exists ( ) ) { throw new RepositoryConfigException ( String . format ( "The properties file %s does not exist!" , propertiesFile . getAbsolutePath ( ) ) ) ; } if ( ! propertiesFile . canRead ( ) ) { throw new RepositoryConfigException ( String . format ( "The properties file %s is not accessible!" , propertiesFile . getAbsolutePath ( ) ) ) ; } if ( constraintFile . isPresent ( ) ) { File file = constraintFile . get ( ) ; if ( ! file . exists ( ) ) { throw new RepositoryConfigException ( String . format ( "The implicit key file %s does not exist!" , file . getAbsolutePath ( ) ) ) ; } if ( ! file . canRead ( ) ) { throw new RepositoryConfigException ( String . format ( "The implicit key file %s is not accessible!" , file . getAbsolutePath ( ) ) ) ; } } } /* * Sometimes thrown when there is no access right to the files . 
*/ catch ( SecurityException e ) { throw new RepositoryConfigException ( e . getMessage ( ) ) ; }
public class UndirectedMultigraph { /** * { @ inheritDoc } */ public IntSet getNeighbors ( int vertex ) { } }
SparseTypedEdgeSet < T > edges = vertexToEdges . get ( vertex ) ; return ( edges == null ) ? PrimitiveCollections . emptyIntSet ( ) : PrimitiveCollections . unmodifiableSet ( edges . connected ( ) ) ;
public class CmsXmlPageConverter {
    /**
     * Converts the contents of a page into an xml page.<p>
     * @param cms the cms object
     * @param content the content used with xml templates
     * @param locale the locale of the body element(s)
     * @param encoding the encoding to the xml page
     * @return the xml page content or null if conversion failed
     * @throws CmsImportExportException if the body content or the XMLTEMPLATE element were not found
     * @throws CmsXmlException if there is an error reading xml contents from the byte array into a document
     */
    @SuppressWarnings("unchecked")
    public static CmsXmlPage convertToXmlPage(CmsObject cms, byte[] content, Locale locale, String encoding)
            throws CmsImportExportException, CmsXmlException {
        CmsXmlPage xmlPage = null;
        Document page = CmsXmlUtils.unmarshalHelper(content, null);
        Element xmltemplate = page.getRootElement();
        if ((xmltemplate == null) || !"XMLTEMPLATE".equals(xmltemplate.getName())) {
            throw new CmsImportExportException(Messages.get().container(Messages.ERR_NOT_FOUND_ELEM_XMLTEMPLATE_0));
        }
        // get all edittemplate nodes
        Iterator<Element> i = xmltemplate.elementIterator("edittemplate");
        boolean useEditTemplates = true;
        if (!i.hasNext()) {
            // no edittemplate nodes found, get the template nodes
            i = xmltemplate.elementIterator("TEMPLATE");
            useEditTemplates = false;
        }
        // now create the XML page
        xmlPage = new CmsXmlPage(locale, encoding);
        while (i.hasNext()) {
            Element currentTemplate = i.next();
            String bodyName = currentTemplate.attributeValue("name");
            if (CmsStringUtil.isEmpty(bodyName)) {
                // no template name found, use the parameter body name
                bodyName = "body";
            }
            String bodyContent = null;
            if (useEditTemplates) {
                // no content manipulation needed for edittemplates
                bodyContent = currentTemplate.getText();
            } else {
                // parse content for TEMPLATEs: collect CDATA sections and expand LINK elements
                StringBuffer contentBuffer = new StringBuffer();
                for (Iterator<Node> k = currentTemplate.nodeIterator(); k.hasNext();) {
                    Node n = k.next();
                    if (n.getNodeType() == Node.CDATA_SECTION_NODE) {
                        contentBuffer.append(n.getText());
                        continue;
                    } else if (n.getNodeType() == Node.ELEMENT_NODE) {
                        if ("LINK".equals(n.getName())) {
                            // LINK elements expand to the OpenCms context path plus the link target
                            contentBuffer.append(OpenCms.getSystemInfo().getOpenCmsContext());
                            contentBuffer.append(n.getText());
                            continue;
                        }
                    }
                    // NOTE(review): all other node types are silently dropped here — confirm intended
                }
                bodyContent = contentBuffer.toString();
            }
            if (bodyContent == null) {
                throw new CmsImportExportException(Messages.get().container(Messages.ERR_BODY_CONTENT_NOT_FOUND_0));
            }
            // replace the OpenCms context macro with the actual context path
            bodyContent = CmsStringUtil.substitute(bodyContent, CmsStringUtil.MACRO_OPENCMS_CONTEXT, OpenCms.getSystemInfo().getOpenCmsContext());
            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(bodyContent)) {
                xmlPage.addValue(bodyName, locale);
                xmlPage.setStringValue(cms, bodyName, locale, bodyContent);
            }
        }
        return xmlPage;
    }
}
public class ConfigOptionParser { /** * Compares the argument with the { @ link CommandLineOption } flags and inserts an object into the parsedOptions map * Terminates with a sane help message if a parse is unsuccessful * @ param arg the current word from the command line argument list * @ param co the config option to look for in the argument * @ param configList the global config list , used to create a sane help message if the parse fails */ private boolean commandLineLookup ( String arg , ConfigOption co , List < ConfigOption > configList ) { } }
if ( arg . startsWith ( co . getCommandLineOption ( ) . getLongFlag ( ) ) || ( co . getCommandLineOption ( ) . hasShortFlag ( ) && arg . startsWith ( co . getCommandLineOption ( ) . getShortFlag ( ) ) ) ) { if ( co . getCommandLineOption ( ) . hasArgument ( ) ) { String [ ] formatArgs = arg . split ( co . getCommandLineOption ( ) . getDelimiter ( ) ) ; if ( formatArgs . length < 2 ) { System . err . println ( "Anticipated argument after " + co . getCommandLineOption ( ) . showFlagInfo ( ) + ", terminating." ) ; printUsageAndExit ( configList ) ; } Object value = co . toObject ( formatArgs [ 1 ] ) ; if ( value == null ) { System . err . println ( "Parse error for flag " + co . getCommandLineOption ( ) . showFlagInfo ( ) + " got " + formatArgs [ 1 ] ) ; printUsageAndExit ( configList ) ; } log . debug ( "setting the argument value: " + co . getLongName ( ) + " to " + value ) ; parsedOptions . put ( co . getLongName ( ) , value ) ; } else { log . debug ( "setting the default value of " + co . getLongName ( ) + " to " + co . getValue ( ) ) ; parsedOptions . put ( co . getLongName ( ) , co . getValue ( ) ) ; } return true ; } return false ;
public class WHandlerAndJsp {
    /**
     * <p>Generic request handler.</p>
     * Forces UTF-8 request/response encoding, runs the i18n handler and the optional
     * named handler, resolves the JSP renderer name, then dispatches either to a
     * redirect servlet or includes the resolved JSP. Any exception is logged and
     * reported to the client as HTTP 500 with the standard servlet error attributes.
     * @param pReq Http Servlet Request
     * @param pResp Http Servlet Response
     * @throws ServletException ServletException
     * @throws IOException IOException
     */
    public final void doWork(final HttpServletRequest pReq, final HttpServletResponse pResp) throws ServletException, IOException {
        pReq.setCharacterEncoding("UTF-8");
        pResp.setCharacterEncoding("UTF-8");
        try {
            HashMap<String, Object> reqVars = new HashMap<String, Object>();
            HttpRequestData requestData = new HttpRequestData(pReq, pResp);
            requestData.setAttribute("reqVars", reqVars);
            IHandlerRequest hndlI18nRequest = (IHandlerRequest) this.factoryAppBeans.lazyGet("hndlI18nRequest");
            hndlI18nRequest.handle(reqVars, requestData);
            String nmRnd = pReq.getParameter("nmRnd");
            if (this.defaultJsp != null && "index".equals(this.defaultJsp)) { // TODO extract into handler
                if (pReq.getParameter("logoff") != null) {
                    pReq.getSession().invalidate();
                }
                @SuppressWarnings("rawtypes")
                SrvAddTheFirstUser srvAddFiU = (SrvAddTheFirstUser) getServletContext().getAttribute("srvAddTheFirstUser");
                // Force the "add first user" page while the user database is still empty.
                if (srvAddFiU != null && !srvAddFiU.checkIsThereAnyUser()) {
                    nmRnd = "addFirstUser";
                }
            }
            if (nmRnd == null) {
                nmRnd = this.defaultJsp;
            }
            if (this.handlerName != null) {
                IHandlerRequest handleRequest = (IHandlerRequest) this.factoryAppBeans.lazyGet(this.handlerName);
                handleRequest.handle(reqVars, requestData);
            }
            ISrvI18n srvI18n = (ISrvI18n) this.factoryAppBeans.lazyGet("ISrvI18n");
            UtlJsp utlJsp = (UtlJsp) this.factoryAppBeans.lazyGet("UtlJsp");
            pReq.setAttribute("srvI18n", srvI18n);
            pReq.setAttribute("utlJsp", utlJsp);
            // renderer maybe overridden:
            String nmRnRd = (String) pReq.getAttribute("nmRnd");
            if (nmRnRd != null) {
                nmRnd = nmRnRd;
            }
            // processor can set redirect servlet, e.g. transactional to non-one:
            String srvlRd = (String) pReq.getAttribute("srvlRed");
            if (srvlRd != null) {
                RequestDispatcher rd = getServletContext().getRequestDispatcher(srvlRd);
                rd.forward(pReq, pResp);
            } else {
                String path = dirJsp + nmRnd + ".jsp";
                RequestDispatcher rd = getServletContext().getRequestDispatcher(path);
                rd.include(pReq, pResp);
            }
        } catch (Exception e) {
            // Try to log through the application logger; fall back to stderr if that fails too.
            if (this.factoryAppBeans != null) {
                ILog logger = null;
                try {
                    logger = (ILog) this.factoryAppBeans.lazyGet("ILog");
                    logger.error(null, getClass(), "WORK", e);
                } catch (Exception e1) {
                    e1.printStackTrace();
                    e.printStackTrace();
                }
            } else {
                e.printStackTrace();
            }
            if (e instanceof ExceptionWithCode) {
                pReq.setAttribute("error_code", ((ExceptionWithCode) e).getCode());
                pReq.setAttribute("short_message", ((ExceptionWithCode) e).getShortMessage());
            } else {
                pReq.setAttribute("error_code", HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            }
            // Populate the standard servlet error attributes before reporting the failure.
            pReq.setAttribute("javax.servlet.error.status_code", HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            pReq.setAttribute("javax.servlet.error.exception", e);
            pReq.setAttribute("javax.servlet.error.request_uri", pReq.getRequestURI());
            pReq.setAttribute("javax.servlet.error.servlet_name", this.getClass().getCanonicalName());
            pResp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        }
    }
}
public class CxxLanguage { /** * Get language specific metric * @ throws IllegalStateException if metric was not registered */ public < G extends Serializable > Metric < G > getMetric ( CxxMetricsFactory . Key metricKey ) { } }
Metric < G > metric = ( Metric < G > ) this . langSpecificMetrics . get ( metricKey ) ; if ( metric == null ) { throw new IllegalStateException ( "Requested metric " + metricKey + " couldn't be found" ) ; } return metric ;
public class GroovyShell {
    /**
     * Parses the given script and returns it ready to be run. When running in a secure environment
     * (-Djava.security.manager) codeSource.getCodeSource() determines what policy grants should be
     * given to the script.
     *
     * @param codeSource the source (with code-source / security context) to compile
     * @return ready to run script
     */
    public Script parse(final GroovyCodeSource codeSource) throws CompilationFailedException {
        // Compile the source into a Class, then wrap it in a Script bound to this
        // shell's binding ("context") so variables are shared with the shell.
        return InvokerHelper.createScript(parseClass(codeSource), context);
    }
}
public class Closure {
    /**
     * Support for Closure "right" currying.
     * Parameters are supplied on the right rather than left as per the normal curry() method.
     * Typical usage:
     * <pre class="groovyTestCase">
     * def divide = { a, b {@code ->} a / b }
     * def halver = divide.rcurry(2)
     * assert halver(8) == 4
     * </pre>
     * The position of the curried parameters will be calculated lazily, for example,
     * if two overloaded doCall methods are available, the supplied arguments plus the
     * curried arguments will be concatenated and the result used for method selection.
     *
     * @param arguments the arguments to bind
     * @return the new closure with its arguments bound
     * @see #curry(Object...)
     */
    public Closure<V> rcurry(final Object... arguments) {
        // A negative bind index tells CurriedClosure to attach the supplied
        // arguments at the right-hand end of the parameter list.
        return new CurriedClosure<V>(-arguments.length, this, arguments);
    }
}
public class ApplicationDecorator {
    /**
     * Starts the decorated application: builds the web-application factory from the
     * configuration held by this decorator and registers it.
     *
     * @throws java.lang.Exception if factory creation or registration fails
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public void start() throws Exception {
        // Assemble the factory from the OSGi bundle context plus the application
        // configuration captured at construction time.
        factory = new DefaultWebApplicationFactory(bundleContext, applicationClass,
                applicationName, mountPoint, contextParams);
        // Publish the factory — presumably as an OSGi service so the web application
        // becomes reachable; TODO confirm against DefaultWebApplicationFactory.register().
        factory.register();
        LOGGER.info("Successfully registered application factory");
    }
}
public class OneShotSQLGeneratorEngine {
    /**
     * Generates the full SQL query.
     * An important part of this program is {@link #generateQueryFromRules}
     * that will create a view for every ans predicate in the input Datalog program.
     *
     * @param signature is the list of main columns in the ResultSet
     * @param ruleIndex maps intentional predicates to its rules
     * @param predicatesInBottomUp the topologically ordered predicates in the program
     * @param extensionalPredicates are the predicates that are not defined by any rule
     * @return the generated SQL string for the whole program
     * @throws OntopReformulationException if type extraction finds incompatible terms
     */
    private String generateQuery(List<String> signature, Multimap<Predicate, CQIE> ruleIndex,
            List<Predicate> predicatesInBottomUp, List<Predicate> extensionalPredicates)
            throws OntopReformulationException {
        final TypeExtractor.TypeResults typeResults;
        try {
            typeResults = typeExtractor.extractTypes(ruleIndex, predicatesInBottomUp, metadata);
            /*
             * Currently, incompatible terms are treated as a reformulation error
             */
        } catch (IncompatibleTermException e) {
            throw new OntopTypingException(e.getMessage());
        }
        ImmutableMap<CQIE, ImmutableList<Optional<TermType>>> termTypeMap = typeResults.getTermTypeMap();
        ImmutableMap<Predicate, ImmutableList<TermType>> castTypeMap = typeResults.getCastTypeMap();
        // Shared counter so every generated view gets a unique index.
        AtomicInteger viewCounter = new AtomicInteger(0);
        // non-top-level intensional predicates - need to create subqueries
        ImmutableMap.Builder<Predicate, FromItem> subQueryDefinitionsBuilder = ImmutableMap.builder();
        Set<RelationID> usedAliases = new HashSet<>();
        // create a view for every ans predicate in the Datalog input program.
        // The last predicate in bottom-up order is the top-level one and is handled separately below.
        int topLevel = predicatesInBottomUp.size() - 1;
        for (int i = 0; i < topLevel; i++) {
            Predicate pred = predicatesInBottomUp.get(i);
            if (!extensionalPredicates.contains(pred)) {
                // extensional predicates are defined by DBs, so, we skip them
                /*
                 * handle the semantics of OPTIONAL when there
                 * are multiple mappings or Unions. It will take mappings of the form
                 * <ul>
                 * <li>Concept <- definition1</li>
                 * <li>Concept <- definition2</li>
                 * </ul>
                 * And will generate a view of the form
                 * <ul>
                 * <li>QConceptView = definition1 UNION definition2
                 * </ul>
                 * This view is stored in the <code>metadata</code>. See DBMetadata
                 * The idea is to use the view definition in the case of Union in the
                 * Optionals/LeftJoins
                 */
                // all have the same arity
                int size = ruleIndex.get(pred).iterator().next().getHead().getArity();
                // create signature: synthetic column names v0..v(n-1)
                ImmutableList.Builder<String> varListBuilder = ImmutableList.builder();
                for (int k = 0; k < size; k++) {
                    varListBuilder.add("v" + k);
                }
                ImmutableList<SignatureVariable> s = createSignature(varListBuilder.build(), castTypeMap.get(pred));
                // Creates the body of the subquery; earlier views may be referenced because
                // predicates are processed in bottom-up (dependency) order.
                String subQuery = generateQueryFromRules(ruleIndex.get(pred), s,
                        subQueryDefinitionsBuilder.build(), termTypeMap, false, viewCounter);
                RelationID subQueryAlias = createAlias(pred.getName(), VIEW_ANS_SUFFIX, usedAliases);
                usedAliases.add(subQueryAlias);
                // Qualify every column alias of the signature with the new view alias.
                ImmutableList.Builder<QualifiedAttributeID> columnsBuilder = ImmutableList.builder();
                for (SignatureVariable var : s) {
                    for (String alias : var.columnAliases) {
                        columnsBuilder.add(new QualifiedAttributeID(subQueryAlias,
                                metadata.getQuotedIDFactory().createAttributeID(alias)));
                    }
                }
                FromItem item = new FromItem(subQueryAlias, inBrackets(subQuery), columnsBuilder.build());
                subQueryDefinitionsBuilder.put(pred, item);
            }
        }
        // top-level intensional predicate: uses the caller-supplied signature and
        // applies DISTINCT only when requested and not already guaranteed by the result set.
        Predicate topLevelPredicate = predicatesInBottomUp.get(topLevel);
        ImmutableList<SignatureVariable> topSignature = createSignature(signature, castTypeMap.get(topLevelPredicate));
        return generateQueryFromRules(ruleIndex.get(topLevelPredicate), topSignature,
                subQueryDefinitionsBuilder.build(), termTypeMap, isDistinct && !distinctResultSet, viewCounter);
    }
}
public class Person { /** * The depending roles for the user are set for the given JAAS system . All * roles are added to the loaded roles in the cache of this person . * @ param _ jaasSystem JAAS system for which the roles are set * @ param _ roles set of roles to set for the JAAS system * @ see # assignRoleInDb * @ see # unassignRoleInDb * @ throws EFapsException from calling methods */ public void setRoles ( final JAASSystem _jaasSystem , final Set < Role > _roles ) throws EFapsException { } }
if ( _jaasSystem == null ) { throw new EFapsException ( getClass ( ) , "setRoles.nojaasSystem" , getName ( ) ) ; } if ( _roles == null ) { throw new EFapsException ( getClass ( ) , "setRoles.noRoles" , getName ( ) ) ; } for ( final Role role : _roles ) { add ( role ) ; } // current roles final Set < Role > rolesInDb = getRolesFromDB ( _jaasSystem ) ; // compare new roles with current roles ( add missing roles ) for ( final Role role : _roles ) { if ( ! rolesInDb . contains ( role ) ) { assignRoleInDb ( _jaasSystem , role ) ; } } // compare current roles with new roles ( remove roles which are to much ) for ( final Role role : rolesInDb ) { if ( ! _roles . contains ( role ) ) { unassignRoleInDb ( _jaasSystem , role ) ; } }
public class HttpUtil { /** * Converts an { @ link InputStream } to a byte array * @ param in input stream to convert * @ return byte array */ public static byte [ ] getBytes ( InputStream in ) { } }
ByteArrayOutputStream os = new ByteArrayOutputStream ( ) ; WritableByteChannel wbc = Channels . newChannel ( os ) ; ReadableByteChannel rbc = Channels . newChannel ( in ) ; ByteBuffer byteBuffer = ByteBuffer . allocate ( 1024 ) ; try { while ( rbc . read ( byteBuffer ) != - 1 ) { byteBuffer . flip ( ) ; wbc . write ( byteBuffer ) ; byteBuffer . clear ( ) ; } wbc . close ( ) ; rbc . close ( ) ; return os . toByteArray ( ) ; } catch ( IOException e ) { throw new HerokuAPIException ( "IOException while reading response" , e ) ; }
public class AlignmentPredictionModel { /** * Trains on full corpus */ public boolean trainOnAll ( ) { } }
List < Map < String , String > > trueLabels = loadLabels ( _trueLabelsFile ) ; List < String > corpus = loadTrainingCorpus ( _trainingCorpusFile ) ; List < List < Acronym > > trainingExtractedCandidates = new ArrayList < List < Acronym > > ( ) ; List < Map < String , String > > trueLabelsForTraining = new ArrayList < Map < String , String > > ( ) ; for ( Integer docID = 0 ; docID < corpus . size ( ) ; ++ docID ) { // Adds to training examples , all the extracted pairs from the // current document . trainingExtractedCandidates . add ( extractCandidatePairs ( corpus . get ( docID ) ) ) ; trueLabelsForTraining . add ( trueLabels . get ( docID ) ) ; } return _abbvHmm . train ( trainingExtractedCandidates , trueLabelsForTraining , true ) ;
public class AbstractIoBuffer {
    /**
     * {@inheritDoc}
     *
     * Expands the underlying NIO buffer to {@code newCapacity} when it exceeds the current
     * capacity; position, limit, mark and byte order are preserved across the reallocation.
     * A smaller or equal capacity is a no-op — this method never shrinks the buffer.
     */
    @Override
    public final IoBuffer capacity(int newCapacity) {
        if (!recapacityAllowed) {
            throw new IllegalStateException("Derived buffers and their parent can't be expanded.");
        }
        // Allocate a new buffer and transfer all settings to it.
        if (newCapacity > capacity()) {
            // Expand:
            //// Save the state.
            int pos = position();
            int limit = limit();
            ByteOrder bo = order();
            //// Reallocate.
            ByteBuffer oldBuf = buf();
            ByteBuffer newBuf = getAllocator().allocateNioBuffer(newCapacity, isDirect());
            // clear() widens the old buffer's window to [0, capacity) so the entire
            // content is copied, not just the part up to the current limit.
            oldBuf.clear();
            newBuf.put(oldBuf);
            buf(newBuf);
            //// Restore the state.
            buf().limit(limit);
            if (mark >= 0) {
                // Re-establish the mark first: mark() records the current position,
                // so the real position must be restored afterwards.
                buf().position(mark);
                buf().mark();
            }
            buf().position(pos);
            buf().order(bo);
        }
        return this;
    }
}
public class Cluster { /** * Set a gain when the cluster sectioned . */ void set_sectioned_gain ( ) { } }
double gain = 0.0f ; if ( sectioned_gain_ == 0 && sectioned_clusters_ . size ( ) > 1 ) { for ( Cluster < K > cluster : sectioned_clusters_ ) { gain += cluster . composite_vector ( ) . norm ( ) ; } gain -= composite_ . norm ( ) ; } sectioned_gain_ = gain ;
public class DisassociatePhoneNumbersFromVoiceConnectorResult { /** * If the action fails for one or more of the phone numbers in the request , a list of the phone numbers is returned , * along with error codes and error messages . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPhoneNumberErrors ( java . util . Collection ) } or { @ link # withPhoneNumberErrors ( java . util . Collection ) } if * you want to override the existing values . * @ param phoneNumberErrors * If the action fails for one or more of the phone numbers in the request , a list of the phone numbers is * returned , along with error codes and error messages . * @ return Returns a reference to this object so that method calls can be chained together . */ public DisassociatePhoneNumbersFromVoiceConnectorResult withPhoneNumberErrors ( PhoneNumberError ... phoneNumberErrors ) { } }
if ( this . phoneNumberErrors == null ) { setPhoneNumberErrors ( new java . util . ArrayList < PhoneNumberError > ( phoneNumberErrors . length ) ) ; } for ( PhoneNumberError ele : phoneNumberErrors ) { this . phoneNumberErrors . add ( ele ) ; } return this ;
public class LockdownManager {
    /**
     * Enable lockdown if it's not already enabled, using the recommended time
     * from the provided {@link ConnectionException}, if any.
     *
     * @param connectionException ConnectionException to check for a recommended
     *                            lockdown time, may be null
     * @return whether or not this call actually locked the system down
     */
    public synchronized boolean lockdown(ConnectionException connectionException) {
        // Already locked down: leave the current state untouched.
        if (isLockedDown()) {
            return false;
        }
        // Pick the next lockdown duration: server recommendation wins; otherwise
        // start at the base time and double on each subsequent lockdown.
        if (connectionException != null && connectionException.getRecommendedLockdownTime() != null) {
            lockdownTime = connectionException.getRecommendedLockdownTime();
        } else if (lockdownTime == 0) {
            lockdownTime = baseLockdownTime;
        } else {
            lockdownTime *= 2;
        }
        // Never exceed the configured ceiling.
        lockdownTime = Math.min(maxLockdownTime, lockdownTime);
        lockdownStartTime = clock.date();
        return true;
    }
}
public class SequenceLabelerME { /** * Gets the name type from the outcome * @ param outcome * the outcome * @ return the name type , or null if not set */ static final String extractNameType ( final String outcome ) { } }
final Matcher matcher = typedOutcomePattern . matcher ( outcome ) ; if ( matcher . matches ( ) ) { final String nameType = matcher . group ( 1 ) ; return nameType ; } return null ;
public class StringUtils { /** * Writes a String to the given output . * The written string can be read with { @ link # readString ( DataInputView ) } . * @ param str The string to write * @ param out The output to write to * @ throws IOException Thrown , if the writing or the serialization fails . */ public static void writeString ( @ Nonnull String str , DataOutputView out ) throws IOException { } }
checkNotNull ( str ) ; StringValue . writeString ( str , out ) ;
public class Configuration { /** * this hoop is needed so that we can type - check the " T " through the stream manipulations */ private < T extends Property > Map < String , String > getOverriddenImplementationConfiguration ( Collection < T > overridableProperties ) { } }
Map < String , String > ret = new HashMap < > ( ) ; overridableProperties . forEach ( p -> { String val = getProperty ( p , null ) ; if ( val != null ) { ret . put ( p . getPropertyName ( ) , val ) ; } } ) ; implementationConfiguration . forEach ( ret :: putIfAbsent ) ; return ret ;
public class FeatureUtilities { /** * Calculate the avg of a value in a list of { @ link SimpleFeature } s . * < p > Empty records are ignored . * @ param features the features . * @ param field the field to consider . * @ return the avg . */ public static double avg ( List < SimpleFeature > features , String field ) { } }
double sum = 0 ; int count = 0 ; for ( SimpleFeature feature : features ) { Object attribute = feature . getAttribute ( field ) ; if ( attribute instanceof Number ) { sum = sum + ( ( Number ) attribute ) . doubleValue ( ) ; count ++ ; } } double avg = sum / count ; return avg ;
public class PermissionEvaluator {
    /**
     * Grants access by permission. If the effective account has a role that resolves
     * to the specified permission (according to mappings of restcomm.xml) access is granted.
     * Administrator is granted access regardless of permissions.
     *
     * @param permission - e.g. 'RestComm:Create:Accounts'
     * @param userIdentityContext context carrying the effective account's roles
     */
    public void checkPermission(final String permission, UserIdentityContext userIdentityContext) {
        // A valid authenticated account is assumed to exist at this point.
        AuthOutcome outcome = checkPermission(permission, userIdentityContext.getEffectiveAccountRoles());
        if (outcome != AuthOutcome.OK) {
            throw new InsufficientPermission();
        }
    }
}
public class AmazonEKSClient {
    /**
     * Lists the Amazon EKS clusters in your AWS account in the specified Region.
     *
     * @param request the ListClusters request
     * @return Result of the ListClusters operation returned by the service.
     * @throws InvalidParameterException
     *         The specified parameter is invalid. Review the available parameters for the API request.
     * @throws ClientException
     *         These errors are usually caused by a client action. Actions can include using an action or resource on
     *         behalf of a user that doesn't have permissions to use the action or resource or specifying an identifier
     *         that is not valid.
     * @throws ServerException
     *         These errors are usually caused by a server-side issue.
     * @throws ServiceUnavailableException
     *         The service is unavailable. Back off and retry the operation.
     * @sample AmazonEKS.ListClusters
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/eks-2017-11-01/ListClusters" target="_top">AWS API
     *      Documentation</a>
     */
    @Override
    public ListClustersResult listClusters(ListClustersRequest request) {
        // Run the registered request handlers / client-side mutations first,
        // then dispatch to the generated execution path.
        request = beforeClientExecution(request);
        return executeListClusters(request);
    }
}
public class DatabaseAccountsInner {
    /**
     * Lists the read-only access keys for the specified Azure Cosmos DB database account.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the DatabaseAccountListReadOnlyKeysResultInner object
     */
    public Observable<DatabaseAccountListReadOnlyKeysResultInner> listReadOnlyKeysAsync(String resourceGroupName, String accountName) {
        // Delegate to the ServiceResponse-returning overload and unwrap the
        // envelope so subscribers receive only the payload body.
        return listReadOnlyKeysWithServiceResponseAsync(resourceGroupName, accountName)
                .map(new Func1<ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner>, DatabaseAccountListReadOnlyKeysResultInner>() {
                    @Override
                    public DatabaseAccountListReadOnlyKeysResultInner call(ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner> response) {
                        return response.body();
                    }
                });
    }
}
public class ProtobufIDLProxy { /** * Check class . * @ param packageName the package name * @ param type the type * @ param mappedUniName the mapped uni name * @ param isUniName the is uni name * @ return the class */ private static Class checkClass ( String packageName , TypeElement type , Map < String , String > mappedUniName , boolean isUniName ) { } }
String simpleName = getProxyClassName ( type . name ( ) , mappedUniName , isUniName ) ; String className = packageName + PACKAGE_SPLIT_CHAR + simpleName ; Class < ? > c = null ; try { c = Class . forName ( className ) ; } catch ( ClassNotFoundException e1 ) { // if class not found so should generate a new java source class . c = null ; } return c ;
public class SpringExceptionHandler { /** * Vendor specific translation . Reads returned errorcode / sqlstate and converts it into Spring SQL exception * based on existing lists . * @ param reason a description of the exception * @ param SQLState an XOPEN or SQL : 2003 code identifying the exception * @ param vendorCode a database vendor - specific exception code * @ param cause original SQL Exception * @ return SQL Exception converted into Spring SQL Exception . Null otherwise */ private MjdbcSQLException translate ( String reason , String SQLState , int vendorCode , SQLException cause ) { } }
MjdbcSQLException result = null ; String sqlState = getSqlState ( cause ) ; String errorCode = getErrorCode ( cause ) ; if ( dbName . startsWith ( "DB2" ) == true ) { // using error code to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . DB2_ERROR_CODE_BAD_SQL_GRAMMAR . contains ( errorCode ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DB2_ERROR_CODE_DUPLICATE_KEY_EXCEPTION . contains ( errorCode ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DB2_ERROR_CODE_DATA_INTEGRITY_VIOLATION . contains ( errorCode ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DB2_ERROR_CODE_DATA_ACCESS_RESOURCE_FAILURE . contains ( errorCode ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DB2_ERROR_CODE_TRANSILIENT_DATA_ACCESS_RESOURCE_EXCEPTION . contains ( errorCode ) == true ) { result = new TransientDataAccessResourceException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DB2_ERROR_CODE_DEADLOCK_LOSER_EXCEPTION . contains ( errorCode ) == true ) { result = new DeadlockLoserDataAccessException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . contains ( "Derby" ) == true ) { // using sql state to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . DERBY_SQL_STATE_BAD_SQL_GRAMMAR . contains ( sqlState ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DERBY_SQL_STATE_DUPLICATE_KEY_EXCEPTION . 
contains ( sqlState ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DERBY_SQL_STATE_DATA_INTEGRITY_VIOLATION . contains ( sqlState ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DERBY_SQL_STATE_DATA_ACCESS_RESOURCE_FAILURE . contains ( sqlState ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DERBY_SQL_STATE_ACQUIRE_LOCK_EXCEPTION . contains ( sqlState ) == true ) { result = new CannotAcquireLockException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . DERBY_SQL_STATE_DEADLOCK_LOSER_EXCEPTION . contains ( sqlState ) == true ) { result = new DeadlockLoserDataAccessException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . startsWith ( "H2" ) == true ) { // using error code to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . H2_ERROR_CODE_BAD_SQL_GRAMMAR . contains ( errorCode ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . H2_ERROR_CODE_DUPLICATE_KEY_EXCEPTION . contains ( errorCode ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . H2_ERROR_CODE_DATA_INTEGRITY_VIOLATION . contains ( errorCode ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . H2_ERROR_CODE_DATA_ACCESS_RESOURCE_FAILURE . contains ( errorCode ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . H2_ERROR_CODE_ACQUIRE_LOCK_EXCEPTION . 
contains ( errorCode ) == true ) { result = new CannotAcquireLockException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . startsWith ( "HSQL" ) == true ) { // using error code to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . HSQL_ERROR_CODE_BAD_SQL_GRAMMAR . contains ( errorCode ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . HSQL_ERROR_CODE_DUPLICATE_KEY_EXCEPTION . contains ( errorCode ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . HSQL_ERROR_CODE_DATA_INTEGRITY_VIOLATION . contains ( errorCode ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . HSQL_ERROR_CODE_DATA_ACCESS_RESOURCE_FAILURE . contains ( errorCode ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . startsWith ( "Informix" ) == true ) { // using error code to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . INFORMIX_ERROR_CODE_BAD_SQL_GRAMMAR . contains ( errorCode ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . INFORMIX_ERROR_CODE_DUPLICATE_KEY_EXCEPTION . contains ( errorCode ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . INFORMIX_ERROR_CODE_DATA_INTEGRITY_VIOLATION . contains ( errorCode ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . startsWith ( "Microsoft SQL Server" ) == true ) { // using error code to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . 
MSSQL_ERROR_CODE_BAD_SQL_GRAMMAR . contains ( errorCode ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MSSQL_ERROR_CODE_DUPLICATE_KEY_EXCEPTION . contains ( errorCode ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MSSQL_ERROR_CODE_DATA_INTEGRITY_VIOLATION . contains ( errorCode ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MSSQL_ERROR_CODE_DATA_ACCESS_RESOURCE_FAILURE . contains ( errorCode ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MSSQL_ERROR_CODE_ACQUIRE_LOCK_EXCEPTION . contains ( errorCode ) == true ) { result = new CannotAcquireLockException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MSSQL_ERROR_CODE_PERMISSION_DENIED . contains ( errorCode ) == true ) { result = new PermissionDeniedDataAccessException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MSSQL_ERROR_CODE_DEADLOCK_LOSER_EXCEPTION . contains ( errorCode ) == true ) { result = new DeadlockLoserDataAccessException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . startsWith ( "MySQL" ) == true ) { // using error code to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . MySQL_ERROR_CODE_BAD_SQL_GRAMMAR . contains ( errorCode ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MySQL_ERROR_CODE_DUPLICATE_KEY_EXCEPTION . contains ( errorCode ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MySQL_ERROR_CODE_DATA_INTEGRITY_VIOLATION . 
contains ( errorCode ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MySQL_ERROR_CODE_DATA_ACCESS_RESOURCE_FAILURE . contains ( errorCode ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MySQL_ERROR_CODE_ACQUIRE_LOCK_EXCEPTION . contains ( errorCode ) == true ) { result = new CannotAcquireLockException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . MySQL_ERROR_CODE_DEADLOCK_LOSER_EXCEPTION . contains ( errorCode ) == true ) { result = new DeadlockLoserDataAccessException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . startsWith ( "Oracle" ) == true ) { // using error code to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . ORACLE_ERROR_CODE_BAD_SQL_GRAMMAR . contains ( errorCode ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . ORACLE_ERROR_CODE_DUPLICATE_KEY_EXCEPTION . contains ( errorCode ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . ORACLE_ERROR_CODE_DATA_INTEGRITY_VIOLATION . contains ( errorCode ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . ORACLE_ERROR_CODE_DATA_ACCESS_RESOURCE_FAILURE . contains ( errorCode ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . ORACLE_ERROR_CODE_ACQUIRE_LOCK_EXCEPTION . contains ( errorCode ) == true ) { result = new CannotAcquireLockException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . ORACLE_ERROR_CODE_INVALID_RESULTSET_ACCESS . 
contains ( errorCode ) == true ) { result = new InvalidResultSetAccessException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . ORACLE_ERROR_CODE_DEADLOCK_LOSER_EXCEPTION . contains ( errorCode ) == true ) { result = new DeadlockLoserDataAccessException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . ORACLE_ERROR_CODE_CANNOT_SERIALIZE_TRANSACTION . contains ( errorCode ) == true ) { result = new CannotSerializeTransactionException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . startsWith ( "PostgreSQL" ) == true ) { // using sql state to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . POSTGRES_SQL_STATE_BAD_SQL_GRAMMAR . contains ( sqlState ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . POSTGRES_SQL_STATE_DUPLICATE_KEY_EXCEPTION . contains ( sqlState ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . POSTGRES_SQL_STATE_DATA_INTEGRITY_VIOLATION . contains ( sqlState ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . POSTGRES_SQL_STATE_DATA_ACCESS_RESOURCE_FAILURE . contains ( sqlState ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . POSTGRES_SQL_STATE_ACQUIRE_LOCK_EXCEPTION . contains ( sqlState ) == true ) { result = new CannotAcquireLockException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . POSTGRES_SQL_STATE_DEADLOCK_LOSER_EXCEPTION . contains ( sqlState ) == true ) { result = new DeadlockLoserDataAccessException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . POSTGRES_SQL_STATE_CANNOT_SERIALIZE_TRANSACTION . 
contains ( sqlState ) == true ) { result = new CannotSerializeTransactionException ( reason , SQLState , vendorCode ) ; } } else if ( dbName . startsWith ( "Sybase" ) == true ) { // using error code to translate vendor specific exception into Spring SQL exception if ( SpringExceptionHandlerConstants . SYBASE_ERROR_CODE_BAD_SQL_GRAMMAR . contains ( errorCode ) == true ) { result = new BadSqlGrammarException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . SYBASE_ERROR_CODE_DUPLICATE_KEY_EXCEPTION . contains ( errorCode ) == true ) { result = new DuplicateKeyException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . SYBASE_ERROR_CODE_DATA_INTEGRITY_VIOLATION . contains ( errorCode ) == true ) { result = new DataIntegrityViolationException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . SYBASE_ERROR_CODE_DATA_ACCESS_RESOURCE_FAILURE . contains ( errorCode ) == true ) { result = new DataAccessResourceFailureException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . SYBASE_ERROR_CODE_ACQUIRE_LOCK_EXCEPTION . contains ( errorCode ) == true ) { result = new CannotAcquireLockException ( reason , SQLState , vendorCode ) ; } else if ( SpringExceptionHandlerConstants . SYBASE_ERROR_CODE_DEADLOCK_LOSER_EXCEPTION . contains ( errorCode ) == true ) { result = new DeadlockLoserDataAccessException ( reason , SQLState , vendorCode ) ; } } return result ;
public class EnvelopesApi { /** * Returns document page image ( s ) based on input . * @ param accountId The external account number ( int ) or account ID Guid . ( required ) * @ param envelopeId The envelopeId Guid of the envelope being accessed . ( required ) * @ param documentId The ID of the document being accessed . ( required ) * @ return PageImages */ public PageImages getPages ( String accountId , String envelopeId , String documentId ) throws ApiException { } }
return getPages ( accountId , envelopeId , documentId , null ) ;
public class ExcelUtils { /** * 无模板 、 基于注解 、 多sheet数据 * @ param sheets 待导出sheet数据 * @ param os 生成的Excel输出文件流 * @ throws Excel4JException 异常 * @ throws IOException 异常 */ public void noTemplateSheet2Excel ( List < NoTemplateSheetWrapper > sheets , OutputStream os ) throws Excel4JException , IOException { } }
try ( Workbook workbook = exportExcelNoTemplateHandler ( sheets , true ) ) { workbook . write ( os ) ; }
public class FctBnPublicTradeProcessors {

    /**
     * <p>Get bean in lazy mode (if bean is null then initialize it).</p>
     * Uses double-checked locking: an unsynchronized fast-path read of
     * {@code processorsMap}, then a re-check inside {@code synchronized} before
     * creating the processor via its dedicated lazy factory method.
     * NOTE(review): the first read happens outside the lock; presumably
     * {@code processorsMap} is a concurrent (or synchronized) map — confirm.
     *
     * @param pAddParam additional param
     * @param pBeanName - bean name
     * @return requested bean
     * @throws Exception - an exception
     */
    @Override
    public final IProcessor lazyGet( // NOPMD
        final Map<String, Object> pAddParam,
            final String pBeanName) throws Exception {
        IProcessor proc = this.processorsMap.get(pBeanName);
        if (proc == null) {
            // locking:
            synchronized (this.processorsMap) {
                // make sure again whether it's null after locking:
                proc = this.processorsMap.get(pBeanName);
                if (proc == null) {
                    // Dispatch on the simple class name of the requested processor.
                    if (pBeanName.equals(PrcDelItemFromCart.class.getSimpleName())) {
                        proc = lazyGetPrcDelItemFromCart(pAddParam);
                    } else if (pBeanName.equals(PrCart.class.getSimpleName())) {
                        proc = lazyGetPrCart(pAddParam);
                    } else if (pBeanName.equals(PrcItemInCart.class.getSimpleName())) {
                        proc = lazyGetPrcItemInCart(pAddParam);
                    } else if (pBeanName.equals(PrLog.class.getSimpleName())) {
                        proc = lazyGetPrLog(pAddParam);
                    } else if (pBeanName.equals(PrcCheckOut.class.getSimpleName())) {
                        proc = lazyGetPrcCheckOut(pAddParam);
                    } else if (pBeanName.equals(PrBuOr.class.getSimpleName())) {
                        proc = lazyGetPrBuOr(pAddParam);
                    } else if (pBeanName.equals(PrBur.class.getSimpleName())) {
                        proc = lazyGetPrBur(pAddParam);
                    } else if (pBeanName.equals(PrPur.class.getSimpleName())) {
                        proc = lazyGetPrPur(pAddParam);
                    } else if (pBeanName.equals(PrcItemPage.class.getSimpleName())) {
                        proc = lazyGetPrcItemPage(pAddParam);
                    } else if (pBeanName.equals(PrcWebstorePage.class.getSimpleName())) {
                        proc = lazyGetPrcWebstorePage(pAddParam);
                    }
                }
            }
        }
        // Unknown bean name: none of the branches above matched.
        if (proc == null) {
            throw new ExceptionWithCode(ExceptionWithCode.CONFIGURATION_MISTAKE,
                "There is no processor with name " + pBeanName);
        }
        return proc;
    }
}
public class SeekableStreamIndexTaskRunner { /** * Authorizes action to be performed on this task ' s datasource * @ return authorization result */ private Access authorizationCheck ( final HttpServletRequest req , Action action ) { } }
return IndexTaskUtils . datasourceAuthorizationCheck ( req , action , task . getDataSource ( ) , authorizerMapper ) ;
public class FactoryMultiViewRobust { /** * Robust solution to PnP problem using { @ link LeastMedianOfSquares LMedS } . Input observations are * in normalized image coordinates . * < ul > * < li > Input observations are in normalized image coordinates NOT pixels < / li > * < li > Error units are pixels squared . < / li > * < / ul > * < p > See code for all the details . < / p > * @ param configPnP PnP parameters . Can ' t be null . * @ param configLMedS Parameters for LMedS . Can ' t be null . * @ return Robust Se3 _ F64 estimator */ public static ModelMatcherMultiview < Se3_F64 , Point2D3D > pnpLMedS ( @ Nullable ConfigPnP configPnP , @ Nonnull ConfigLMedS configLMedS ) { } }
if ( configPnP == null ) configPnP = new ConfigPnP ( ) ; configPnP . checkValidity ( ) ; configLMedS . checkValidity ( ) ; Estimate1ofPnP estimatorPnP = FactoryMultiView . pnp_1 ( configPnP . which , configPnP . epnpIterations , configPnP . numResolve ) ; DistanceFromModelMultiView < Se3_F64 , Point2D3D > distance = new PnPDistanceReprojectionSq ( ) ; ModelManagerSe3_F64 manager = new ModelManagerSe3_F64 ( ) ; EstimatorToGenerator < Se3_F64 , Point2D3D > generator = new EstimatorToGenerator < > ( estimatorPnP ) ; LeastMedianOfSquaresMultiView < Se3_F64 , Point2D3D > lmeds = new LeastMedianOfSquaresMultiView < > ( configLMedS . randSeed , configLMedS . totalCycles , manager , generator , distance ) ; lmeds . setErrorFraction ( configLMedS . errorFraction ) ; return lmeds ;
public class AbstractOpenPgpStore { /** * OpenPgpTrustStore */ @ Override public Trust getTrust ( BareJid owner , OpenPgpV4Fingerprint fingerprint ) throws IOException { } }
return trustStore . getTrust ( owner , fingerprint ) ;
public class Destination { /** * The To : field ( s ) of the message . * @ param toAddresses * The To : field ( s ) of the message . */ public void setToAddresses ( java . util . Collection < String > toAddresses ) { } }
if ( toAddresses == null ) { this . toAddresses = null ; return ; } this . toAddresses = new com . amazonaws . internal . SdkInternalList < String > ( toAddresses ) ;
public class HtmlEscape { /** * Perform an HTML5 level 2 ( result is ASCII ) < strong > escape < / strong > operation on a < tt > char [ ] < / tt > input . * < em > Level 2 < / em > means this method will escape : * < ul > * < li > The five markup - significant characters : < tt > & lt ; < / tt > , < tt > & gt ; < / tt > , < tt > & amp ; < / tt > , * < tt > & quot ; < / tt > and < tt > & # 39 ; < / tt > < / li > * < li > All non ASCII characters . < / li > * < / ul > * This escape will be performed by replacing those chars by the corresponding HTML5 Named Character References * ( e . g . < tt > ' & amp ; acute ; ' < / tt > ) when such NCR exists for the replaced character , and replacing by a decimal * character reference ( e . g . < tt > ' & amp ; # 8345 ; ' < / tt > ) when there there is no NCR for the replaced character . * This method calls { @ link # escapeHtml ( char [ ] , int , int , java . io . Writer , HtmlEscapeType , HtmlEscapeLevel ) } * with the following preconfigured values : * < ul > * < li > < tt > type < / tt > : * { @ link org . unbescape . html . HtmlEscapeType # HTML5 _ NAMED _ REFERENCES _ DEFAULT _ TO _ DECIMAL } < / li > * < li > < tt > level < / tt > : * { @ link org . unbescape . html . HtmlEscapeLevel # LEVEL _ 2 _ ALL _ NON _ ASCII _ PLUS _ MARKUP _ SIGNIFICANT } < / li > * < / ul > * This method is < strong > thread - safe < / strong > . * @ param text the < tt > char [ ] < / tt > to be escaped . * @ param offset the position in < tt > text < / tt > at which the escape operation should start . * @ param len the number of characters in < tt > text < / tt > that should be escaped . * @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will * be written at all to this writer if input is < tt > null < / tt > . 
* @ throws IOException if an input / output exception occurs */ public static void escapeHtml5 ( final char [ ] text , final int offset , final int len , final Writer writer ) throws IOException { } }
escapeHtml ( text , offset , len , writer , HtmlEscapeType . HTML5_NAMED_REFERENCES_DEFAULT_TO_DECIMAL , HtmlEscapeLevel . LEVEL_2_ALL_NON_ASCII_PLUS_MARKUP_SIGNIFICANT ) ;
public class CmsColor {

    /**
     * Calculates the largest value between the three inputs.<p>
     *
     * Fixed: the previous implementation seeded the running maximum with
     * {@code Integer.MIN_VALUE} (~-2.147e9f), so it returned that seed instead of the
     * real maximum whenever all three inputs were smaller (e.g. -3.0e9f). Delegating
     * to {@link Math#max(float, float)} is correct over the whole float range and
     * additionally handles -0.0f ordering and propagates NaN instead of silently
     * ignoring it. Visibility widened from private to package-private (and made
     * static — the method touches no instance state) so it can be unit tested;
     * existing instance-style call sites still compile.
     *
     * @param first value
     * @param second value
     * @param third value
     * @return the largest value between the three inputs
     */
    static float MAX(float first, float second, float third) {
        return Math.max(first, Math.max(second, third));
    }
}
public class AbstractHealthAggregator { /** * Return the map of ' aggregate ' details that should be used from the specified * healths . * @ param healths the health instances to aggregate * @ return a map of details * @ since 1.3.1 */ protected Map < String , Object > aggregateDetails ( Map < String , Health > healths ) { } }
return new LinkedHashMap < > ( healths ) ;
public class ConstraintViolationException_CustomFieldSerializer { /** * instantiate constraint violation exception . * @ param streamReader serialization stream reader to read data from * @ return ConstraintViolationException * @ throws SerializationException if deserialization fails */ public static ConstraintViolationException instantiate ( final SerializationStreamReader streamReader ) throws SerializationException { } }
final String message = streamReader . readString ( ) ; @ SuppressWarnings ( "unchecked" ) final Set < ConstraintViolation < ? > > set = ( Set < ConstraintViolation < ? > > ) streamReader . readObject ( ) ; return new ConstraintViolationException ( message , set ) ;
public class Residue0 {

    /**
     * Decodes residue data for residue formats 0 and 1 into {@code in}
     * (re-using partword buffers between calls).
     * The method is {@code synchronized} because it reuses the shared static
     * {@code _01inverse_partword} scratch buffer across invocations.
     * Returns 0 both on success and on a truncated/invalid packet (decode
     * failures simply stop decoding early).
     *
     * @param vb current block, supplies the bit stream ({@code vb.opb})
     * @param vl per-residue lookup state (a {@link LookResidue0})
     * @param in per-channel output vectors the residue is accumulated into
     * @param ch number of channels to decode
     * @param decodepart 0 = decodevs_add (interleaved), 1 = decodev_add
     */
    synchronized static int _01inverse(Block vb, Object vl, float[][] in, int ch, int decodepart) {
        int i, j, k, l, s;
        LookResidue0 look = (LookResidue0) vl;
        InfoResidue0 info = look.info;
        // move all this setup out later
        int samples_per_partition = info.grouping;
        int partitions_per_word = look.phrasebook.dim;
        int n = info.end - info.begin;
        int partvals = n / samples_per_partition;
        // ceiling division: number of codewords needed to cover all partitions
        int partwords = (partvals + partitions_per_word - 1) / partitions_per_word;
        // Grow the shared scratch buffer if this call needs more channels/partwords.
        if (_01inverse_partword.length < ch) {
            _01inverse_partword = new int[ch][][];
        }
        for (j = 0; j < ch; j++) {
            if (_01inverse_partword[j] == null || _01inverse_partword[j].length < partwords) {
                _01inverse_partword[j] = new int[partwords][];
            }
        }
        for (s = 0; s < look.stages; s++) {
            // each loop decodes on partition codeword containing
            // partitions_pre_word partitions
            for (i = 0, l = 0; i < partvals; l++) {
                if (s == 0) {
                    // fetch the partition word for each channel
                    for (j = 0; j < ch; j++) {
                        int temp = look.phrasebook.decode(vb.opb);
                        // -1 => end of packet / decode error: bail out quietly
                        if (temp == -1) {
                            return (0);
                        }
                        _01inverse_partword[j][l] = look.decodemap[temp];
                        if (_01inverse_partword[j][l] == null) {
                            return (0);
                        }
                    }
                }
                // now we decode residual values for the partitions
                for (k = 0; k < partitions_per_word && i < partvals; k++, i++)
                    for (j = 0; j < ch; j++) {
                        int offset = info.begin + i * samples_per_partition;
                        int index = _01inverse_partword[j][l][k];
                        // Only decode this partition if the codebook participates in stage s.
                        if ((info.secondstages[index] & (1 << s)) != 0) {
                            CodeBook stagebook = look.fullbooks[look.partbooks[index][s]];
                            if (stagebook != null) {
                                if (decodepart == 0) {
                                    if (stagebook.decodevs_add(in[j], offset, vb.opb, samples_per_partition) == -1) {
                                        return (0);
                                    }
                                } else if (decodepart == 1) {
                                    if (stagebook.decodev_add(in[j], offset, vb.opb, samples_per_partition) == -1) {
                                        return (0);
                                    }
                                }
                            }
                        }
                    }
            }
        }
        return (0);
    }
}
public class ComputerVisionImpl { /** * This operation generates a description of an image in human readable language with complete sentences . The description is based on a collection of content tags , which are also returned by the operation . More than one description can be generated for each image . Descriptions are ordered by their confidence score . All descriptions are in English . Two input methods are supported - - ( 1 ) Uploading an image or ( 2 ) specifying an image URL . A successful response will be returned in JSON . If the request failed , the response will contain an error code and a message to help understand what went wrong . * @ param url Publicly reachable URL of an image * @ param describeImageOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ImageDescription object */ public Observable < ImageDescription > describeImageAsync ( String url , DescribeImageOptionalParameter describeImageOptionalParameter ) { } }
return describeImageWithServiceResponseAsync ( url , describeImageOptionalParameter ) . map ( new Func1 < ServiceResponse < ImageDescription > , ImageDescription > ( ) { @ Override public ImageDescription call ( ServiceResponse < ImageDescription > response ) { return response . body ( ) ; } } ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link VendorCredit } { @ code > } } */ @ XmlElementDecl ( namespace = "http://schema.intuit.com/finance/v3" , name = "VendorCredit" , substitutionHeadNamespace = "http://schema.intuit.com/finance/v3" , substitutionHeadName = "IntuitObject" ) public JAXBElement < VendorCredit > createVendorCredit ( VendorCredit value ) { } }
return new JAXBElement < VendorCredit > ( _VendorCredit_QNAME , VendorCredit . class , null , value ) ;
public class YedGmlWriter { /** * Exports a directed graph into a PLAIN text file in GML format . * @ param output the writer to which the graph to be exported * @ param g the directed graph to be exported */ public void export ( Writer output , DirectedGraph < V , E > g ) { } }
export ( output , g , true ) ;
public class ArrayBlockingQueue { /** * Inserts the specified element at the tail of this queue if it is * possible to do so immediately without exceeding the queue ' s capacity , * returning { @ code true } upon success and { @ code false } if this queue * is full . This method is generally preferable to method { @ link # add } , * which can fail to insert an element only by throwing an exception . * @ throws NullPointerException if the specified element is null */ public boolean offer ( E e ) { } }
Objects . requireNonNull ( e ) ; final ReentrantLock lock = this . lock ; lock . lock ( ) ; try { if ( count == items . length ) return false ; else { enqueue ( e ) ; return true ; } } finally { lock . unlock ( ) ; }
public class MountTable {

    /**
     * Mounts the given UFS path at the given Alluxio path. The Alluxio path should not be nested
     * under an existing mount point.
     * All validation and the journaled state change happen under the table's write lock.
     *
     * @param journalContext the journal context
     * @param alluxioUri an Alluxio path URI
     * @param ufsUri a UFS path URI
     * @param mountId the mount id
     * @param options the mount options
     * @throws FileAlreadyExistsException if the mount point already exists
     * @throws InvalidPathException if an invalid path is encountered
     */
    public void add(Supplier<JournalContext> journalContext, AlluxioURI alluxioUri,
        AlluxioURI ufsUri, long mountId, MountPOptions options)
        throws FileAlreadyExistsException, InvalidPathException {
        // Normalize an empty path to root.
        String alluxioPath = alluxioUri.getPath().isEmpty() ? "/" : alluxioUri.getPath();
        LOG.info("Mounting {} at {}", ufsUri, alluxioPath);
        try (LockResource r = new LockResource(mWriteLock)) {
            if (mState.getMountTable().containsKey(alluxioPath)) {
                throw new FileAlreadyExistsException(
                    ExceptionMessage.MOUNT_POINT_ALREADY_EXISTS.getMessage(alluxioPath));
            }
            // Make sure that the ufs path we're trying to mount is not a prefix
            // or suffix of any existing mount path.
            for (Map.Entry<String, MountInfo> entry : mState.getMountTable().entrySet()) {
                AlluxioURI mountedUfsUri = entry.getValue().getUfsUri();
                // Only URIs on the same scheme (or with no scheme) and same authority can conflict.
                // NOTE(review): ufsUri.getAuthority() is dereferenced without a null check;
                // presumably Authority is never null for a mounted URI — confirm in AlluxioURI.
                if ((ufsUri.getScheme() == null || ufsUri.getScheme().equals(mountedUfsUri.getScheme()))
                    && (ufsUri.getAuthority().toString().equals(mountedUfsUri.getAuthority().toString()))) {
                    String ufsPath = ufsUri.getPath().isEmpty() ? "/" : ufsUri.getPath();
                    String mountedUfsPath = mountedUfsUri.getPath().isEmpty() ? "/" : mountedUfsUri.getPath();
                    if (PathUtils.hasPrefix(ufsPath, mountedUfsPath)) {
                        throw new InvalidPathException(ExceptionMessage.MOUNT_POINT_PREFIX_OF_ANOTHER
                            .getMessage(mountedUfsUri.toString(), ufsUri.toString()));
                    }
                    if (PathUtils.hasPrefix(mountedUfsPath, ufsPath)) {
                        throw new InvalidPathException(ExceptionMessage.MOUNT_POINT_PREFIX_OF_ANOTHER
                            .getMessage(ufsUri.toString(), mountedUfsUri.toString()));
                    }
                }
            }
            // Journal and apply the new mount point while still holding the write lock.
            Map<String, String> properties = options.getPropertiesMap();
            mState.applyAndJournal(journalContext,
                AddMountPointEntry.newBuilder()
                    .addAllProperties(properties.entrySet().stream()
                        .map(entry -> StringPairEntry.newBuilder()
                            .setKey(entry.getKey()).setValue(entry.getValue()).build())
                        .collect(Collectors.toList()))
                    .setAlluxioPath(alluxioPath)
                    .setMountId(mountId)
                    .setReadOnly(options.getReadOnly())
                    .setShared(options.getShared())
                    .setUfsPath(ufsUri.toString())
                    .build());
        }
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } * { @ link CmisAccessControlListType } { @ code > } */ @ XmlElementDecl ( namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/" , name = "addACEs" , scope = CreateRelationship . class ) public JAXBElement < CmisAccessControlListType > createCreateRelationshipAddACEs ( CmisAccessControlListType value ) { } }
return new JAXBElement < CmisAccessControlListType > ( _CreateDocumentAddACEs_QNAME , CmisAccessControlListType . class , CreateRelationship . class , value ) ;
public class Lookup { /** * / * interpolated lookup based cos function , domain 0 to PI only */ static float coslook ( float a ) { } }
double d = a * ( .31830989 * ( float ) COS_LOOKUP_SZ ) ; int i = ( int ) d ; return COS_LOOKUP [ i ] + ( ( float ) ( d - i ) ) * ( COS_LOOKUP [ i + 1 ] - COS_LOOKUP [ i ] ) ;
public class DefaultEntityHandler { /** * INSERT SQL生成 * @ param metadata エンティティメタ情報 * @ param type エイティティタイプ * @ param sqlConfig SQLコンフィグ * @ param ignoreWhenEmpty 空白パラメータをSQLに含めない条件文を設定する * @ return INSERT SQL */ protected String buildInsertSQL ( final TableMetadata metadata , final Class < ? extends Object > type , final SqlConfig sqlConfig , final boolean ignoreWhenEmpty ) { } }
Map < String , MappingColumn > mappingColumns = MappingUtils . getMappingColumnMap ( type , SqlKind . INSERT ) ; StringBuilder sql = buildInsertTargetBlock ( metadata , mappingColumns , sqlConfig , ignoreWhenEmpty ) ; sql . append ( " VALUES " ) ; sql . append ( buildInsertRowBlock ( metadata , mappingColumns , sqlConfig , ignoreWhenEmpty , TableMetadata . Column :: getCamelColumnName ) ) ; return sql . toString ( ) ;
public class DateIntervalInfo {

    /**
     * Initialize the DateIntervalInfo from locale.
     * On a cache miss the data is built from scratch and a frozen clone is cached;
     * on a hit, this instance is initialized from the cached read-only patterns.
     *
     * @param locale the given locale.
     */
    private void initializeData(ULocale locale) {
        String key = locale.toString();
        DateIntervalInfo dii = DIICACHE.get(key);
        if (dii == null) {
            // initialize data from scratch
            setup(locale);
            // Marking fIntervalPatterns read-only makes cloning cheaper.
            fIntervalPatternsReadOnly = true;
            // We freeze what goes in the cache without freezing this object.
            DIICACHE.put(key, ((DateIntervalInfo) clone()).freeze());
        } else {
            // Cache hit: copy the frozen entry's read-only patterns into this instance.
            initializeFromReadOnlyPatterns(dii);
        }
    }
}
public class SolutionListUtils { /** * This method receives a normalized list of non - dominated solutions and return the inverted one . * This operation is needed for minimization problem * @ param solutionSet The front to invert * @ return The inverted front */ @ SuppressWarnings ( "unchecked" ) public static < S extends Solution < ? > > List < S > getInvertedFront ( List < S > solutionSet ) { } }
List < S > invertedFront = new ArrayList < > ( solutionSet . size ( ) ) ; int numberOfObjectives = solutionSet . get ( 0 ) . getNumberOfObjectives ( ) ; for ( int i = 0 ; i < solutionSet . size ( ) ; i ++ ) { invertedFront . add ( i , ( S ) solutionSet . get ( i ) . copy ( ) ) ; for ( int j = 0 ; j < numberOfObjectives ; j ++ ) { if ( solutionSet . get ( i ) . getObjective ( j ) <= 1.0 && solutionSet . get ( i ) . getObjective ( j ) >= 0.0 ) { invertedFront . get ( i ) . setObjective ( j , 1.0 - solutionSet . get ( i ) . getObjective ( j ) ) ; } else if ( solutionSet . get ( i ) . getObjective ( j ) > 1.0 ) { invertedFront . get ( i ) . setObjective ( j , 0.0 ) ; } else if ( solutionSet . get ( i ) . getObjective ( j ) < 0.0 ) { invertedFront . get ( i ) . setObjective ( j , 1.0 ) ; } } } return invertedFront ;
public class JsonDBTemplate {

    /**
     * (non-Javadoc)
     * @see org.jsondb.JsonDBOperations#findAllAndRemove(java.lang.String, java.lang.String)
     *
     * Finds every object matching the JXPath query in the named collection, removes the
     * matches from the backing JSON file and from the in-memory collection, and returns
     * the removed objects. Returns {@code null} (not an empty list) when nothing matches
     * or when the file removal reports failure.
     */
    @Override
    public <T> List<T> findAllAndRemove(String jxQuery, String collectionName) {
        CollectionMetaData cmd = cmdMap.get(collectionName);
        @SuppressWarnings("unchecked")
        Map<Object, T> collection = (Map<Object, T>) collectionsRef.get().get(collectionName);
        if ((null == cmd) || (null == collection)) {
            throw new InvalidJsonDbApiUsageException(
                "Collection by name '" + collectionName + "' not found. Create collection first.");
        }
        // Write lock: removal mutates both the persisted file and the in-memory map.
        cmd.getCollectionLock().writeLock().lock();
        try {
            JXPathContext context = contextsRef.get().get(collectionName);
            @SuppressWarnings("unchecked")
            Iterator<T> resultItr = context.iterate(jxQuery);
            // First pass: collect the ids of every matching object.
            Set<Object> removeIds = new HashSet<Object>();
            while (resultItr.hasNext()) {
                T objectToRemove = resultItr.next();
                Object idToRemove = Util.getIdForEntity(objectToRemove, cmd.getIdAnnotatedFieldGetterMethod());
                removeIds.add(idToRemove);
            }
            if (removeIds.size() < 1) {
                return null;
            }
            JsonWriter jw;
            try {
                jw = new JsonWriter(dbConfig, cmd, collectionName, fileObjectsRef.get().get(collectionName));
            } catch (IOException ioe) {
                logger.error("Failed to obtain writer for " + collectionName, ioe);
                throw new JsonDBException("Failed to save " + collectionName, ioe);
            }
            // Remove from the persisted file first; only mutate memory if that succeeded.
            boolean substractResult = jw.removeFromJsonFile(collection, removeIds);
            List<T> removedObjects = null;
            if (substractResult) {
                removedObjects = new ArrayList<T>();
                for (Object id : removeIds) {
                    // Don't need to clone it, this object no more exists in the collection
                    removedObjects.add(collection.remove(id));
                }
            }
            return removedObjects;
        } finally {
            cmd.getCollectionLock().writeLock().unlock();
        }
    }
}
public class XMLFormatter {

    /**
     * Return the header string for a set of XML formatted records: the XML prolog
     * (with the handler's encoding, canonicalized when possible, or the platform
     * default), the DOCTYPE declaration and the opening {@code <log>} element.
     *
     * @param h The target handler (can be null)
     * @return a valid XML string
     */
    public String getHead(Handler h) {
        String encoding = (h == null) ? null : h.getEncoding();
        if (encoding == null) {
            // No handler encoding given: fall back to the platform default.
            encoding = Charset.defaultCharset().name();
        }
        try {
            // Map the encoding name to its canonical charset name.
            encoding = Charset.forName(encoding).name();
        } catch (Exception ignored) {
            // Unknown charset name: keep the raw encoding name as-is.
        }
        StringBuilder head = new StringBuilder();
        head.append("<?xml version=\"1.0\"");
        head.append(" encoding=\"");
        head.append(encoding);
        head.append("\"");
        head.append(" standalone=\"no\"?>\n");
        head.append("<!DOCTYPE log SYSTEM \"logger.dtd\">\n");
        head.append("<log>\n");
        return head.toString();
    }
}
public class StreamEx { /** * Returns a stream consisting of the results of applying the given function * to the the first element and every other element of this stream . * This is a < a href = " package - summary . html # StreamOps " > quasi - intermediate * operation < / a > . * The size of the resulting stream is one element less than the input * stream . If the input stream is empty or contains just one element , then * the output stream will be empty . * @ param < R > The element type of the new stream * @ param mapper a non - interfering , stateless function to apply to the first * stream element and every other element * @ return the new stream * @ see # withFirst ( ) * @ see # headTail ( BiFunction ) * @ since 0.5.3 */ public < R > StreamEx < R > withFirst ( BiFunction < ? super T , ? super T , ? extends R > mapper ) { } }
WithFirstSpliterator < T , R > spliterator = new WithFirstSpliterator < > ( spliterator ( ) , mapper ) ; return new StreamEx < > ( spliterator , context ) ;
public class ManagementPoliciesInner { /** * Gets the managementpolicy associated with the specified storage account . * @ param resourceGroupName The name of the resource group within the user ' s subscription . The name is case insensitive . * @ param accountName The name of the storage account within the specified resource group . Storage account names must be between 3 and 24 characters in length and use numbers and lower - case letters only . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ManagementPolicyInner object if successful . */ public ManagementPolicyInner get ( String resourceGroupName , String accountName ) { } }
return getWithServiceResponseAsync ( resourceGroupName , accountName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class User { /** * Gets the list of { @ link Build } s that include changes by this user , * by the timestamp order . */ @ SuppressWarnings ( "unchecked" ) @ WithBridgeMethods ( List . class ) public @ Nonnull RunList getBuilds ( ) { } }
return RunList . fromJobs ( ( Iterable ) Jenkins . get ( ) . allItems ( Job . class ) ) . filter ( ( Predicate < Run < ? , ? > > ) r -> r instanceof AbstractBuild && relatedTo ( ( AbstractBuild < ? , ? > ) r ) ) ;
public class ApiOvhIp { /** * Park this IP * REST : POST / ip / { ip } / park * @ param ip [ required ] */ public OvhIpTask ip_park_POST ( String ip ) throws IOException { } }
String qPath = "/ip/{ip}/park" ; StringBuilder sb = path ( qPath , ip ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhIpTask . class ) ;
public class CmsSetupBean { /** * Returns a sorted list with they keys ( e . g . " mysql " , " generic " or " oracle " ) of all available * database server setups found in " / setup / database / " sorted by their ranking property . < p > * @ return a sorted list with they keys ( e . g . " mysql " , " generic " or " oracle " ) of all available database server setups */ public List < String > getSortedDatabases ( ) { } }
if ( m_sortedDatabaseKeys == null ) { List < String > databases = m_databaseKeys ; List < String > sortedDatabases = new ArrayList < String > ( databases . size ( ) ) ; SortedMap < Integer , String > mappedDatabases = new TreeMap < Integer , String > ( ) ; for ( int i = 0 ; i < databases . size ( ) ; i ++ ) { String key = databases . get ( i ) ; Integer ranking = new Integer ( 0 ) ; try { ranking = Integer . valueOf ( getDbProperty ( key + ".ranking" ) ) ; } catch ( Exception e ) { // ignore } mappedDatabases . put ( ranking , key ) ; } while ( mappedDatabases . size ( ) > 0 ) { // get database with highest ranking Integer key = mappedDatabases . lastKey ( ) ; String database = mappedDatabases . get ( key ) ; sortedDatabases . add ( database ) ; mappedDatabases . remove ( key ) ; } m_sortedDatabaseKeys = sortedDatabases ; } return m_sortedDatabaseKeys ;
// ---------------------------------------------------------------------------
// ANTLR-generated parser rule for XImportDeclaration (grammar InternalXtype.g,
// rule at 878:1). Recognizes:
//   'import' ( 'static' 'extension'? qualifiedNameInStaticImport ('*' | validID)
//            | qualifiedName
//            | qualifiedNameWithWildcard ) ';'?
// and builds the corresponding EObject, setting the 'static', 'extension',
// 'wildcard', 'memberName' and 'importedNamespace' features as consumed.
// Machine-generated code: do not hand-edit; regenerate from the grammar.
// ---------------------------------------------------------------------------
public class InternalXtypeParser { /** * InternalXtype . g : 878:1 : ruleXImportDeclaration returns [ EObject current = null ] : ( otherlv _ 0 = ' import ' ( ( ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) ) | ( ( ruleQualifiedName ) ) | ( ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) ) ) ( otherlv _ 8 = ' ; ' ) ? ) ; */ public final EObject ruleXImportDeclaration ( ) throws RecognitionException { } }
EObject current = null ; Token otherlv_0 = null ; Token lv_static_1_0 = null ; Token lv_extension_2_0 = null ; Token lv_wildcard_4_0 = null ; Token otherlv_8 = null ; AntlrDatatypeRuleToken lv_memberName_5_0 = null ; AntlrDatatypeRuleToken lv_importedNamespace_7_0 = null ; enterRule ( ) ; try { // InternalXtype . g : 884:2 : ( ( otherlv _ 0 = ' import ' ( ( ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) ) | ( ( ruleQualifiedName ) ) | ( ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) ) ) ( otherlv _ 8 = ' ; ' ) ? ) ) // InternalXtype . g : 885:2 : ( otherlv _ 0 = ' import ' ( ( ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) ) | ( ( ruleQualifiedName ) ) | ( ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) ) ) ( otherlv _ 8 = ' ; ' ) ? ) { // InternalXtype . g : 885:2 : ( otherlv _ 0 = ' import ' ( ( ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) ) | ( ( ruleQualifiedName ) ) | ( ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) ) ) ( otherlv _ 8 = ' ; ' ) ? ) // InternalXtype . g : 886:3 : otherlv _ 0 = ' import ' ( ( ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) ) | ( ( ruleQualifiedName ) ) | ( ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) ) ) ( otherlv _ 8 = ' ; ' ) ? { otherlv_0 = ( Token ) match ( input , 24 , FOLLOW_19 ) ; if ( state . 
failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_0 , grammarAccess . getXImportDeclarationAccess ( ) . getImportKeyword_0 ( ) ) ; } // InternalXtype . g : 890:3 : ( ( ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) ) | ( ( ruleQualifiedName ) ) | ( ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) ) ) int alt18 = 3 ; alt18 = dfa18 . predict ( input ) ; switch ( alt18 ) { case 1 : // InternalXtype . g : 891:4 : ( ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) ) { // InternalXtype . g : 891:4 : ( ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) ) // InternalXtype . g : 892:5 : ( ( lv _ static _ 1_0 = ' static ' ) ) ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? ( ( ruleQualifiedNameInStaticImport ) ) ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) { // InternalXtype . g : 892:5 : ( ( lv _ static _ 1_0 = ' static ' ) ) // InternalXtype . g : 893:6 : ( lv _ static _ 1_0 = ' static ' ) { // InternalXtype . g : 893:6 : ( lv _ static _ 1_0 = ' static ' ) // InternalXtype . g : 894:7 : lv _ static _ 1_0 = ' static ' { lv_static_1_0 = ( Token ) match ( input , 25 , FOLLOW_20 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( lv_static_1_0 , grammarAccess . getXImportDeclarationAccess ( ) . getStaticStaticKeyword_1_0_0_0 ( ) ) ; } if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . 
getXImportDeclarationRule ( ) ) ; } setWithLastConsumed ( current , "static" , true , "static" ) ; } } } // InternalXtype . g : 906:5 : ( ( lv _ extension _ 2_0 = ' extension ' ) ) ? int alt16 = 2 ; int LA16_0 = input . LA ( 1 ) ; if ( ( LA16_0 == 26 ) ) { alt16 = 1 ; } switch ( alt16 ) { case 1 : // InternalXtype . g : 907:6 : ( lv _ extension _ 2_0 = ' extension ' ) { // InternalXtype . g : 907:6 : ( lv _ extension _ 2_0 = ' extension ' ) // InternalXtype . g : 908:7 : lv _ extension _ 2_0 = ' extension ' { lv_extension_2_0 = ( Token ) match ( input , 26 , FOLLOW_20 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( lv_extension_2_0 , grammarAccess . getXImportDeclarationAccess ( ) . getExtensionExtensionKeyword_1_0_1_0 ( ) ) ; } if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXImportDeclarationRule ( ) ) ; } setWithLastConsumed ( current , "extension" , true , "extension" ) ; } } } break ; } // InternalXtype . g : 920:5 : ( ( ruleQualifiedNameInStaticImport ) ) // InternalXtype . g : 921:6 : ( ruleQualifiedNameInStaticImport ) { // InternalXtype . g : 921:6 : ( ruleQualifiedNameInStaticImport ) // InternalXtype . g : 922:7 : ruleQualifiedNameInStaticImport { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXImportDeclarationRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXImportDeclarationAccess ( ) . getImportedTypeJvmDeclaredTypeCrossReference_1_0_2_0 ( ) ) ; } pushFollow ( FOLLOW_21 ) ; ruleQualifiedNameInStaticImport ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } // InternalXtype . g : 936:5 : ( ( ( lv _ wildcard _ 4_0 = ' * ' ) ) | ( ( lv _ memberName _ 5_0 = ruleValidID ) ) ) int alt17 = 2 ; int LA17_0 = input . 
LA ( 1 ) ; if ( ( LA17_0 == 23 ) ) { alt17 = 1 ; } else if ( ( LA17_0 == RULE_ID ) ) { alt17 = 2 ; } else { if ( state . backtracking > 0 ) { state . failed = true ; return current ; } NoViableAltException nvae = new NoViableAltException ( "" , 17 , 0 , input ) ; throw nvae ; } switch ( alt17 ) { case 1 : // InternalXtype . g : 937:6 : ( ( lv _ wildcard _ 4_0 = ' * ' ) ) { // InternalXtype . g : 937:6 : ( ( lv _ wildcard _ 4_0 = ' * ' ) ) // InternalXtype . g : 938:7 : ( lv _ wildcard _ 4_0 = ' * ' ) { // InternalXtype . g : 938:7 : ( lv _ wildcard _ 4_0 = ' * ' ) // InternalXtype . g : 939:8 : lv _ wildcard _ 4_0 = ' * ' { lv_wildcard_4_0 = ( Token ) match ( input , 23 , FOLLOW_22 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( lv_wildcard_4_0 , grammarAccess . getXImportDeclarationAccess ( ) . getWildcardAsteriskKeyword_1_0_3_0_0 ( ) ) ; } if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXImportDeclarationRule ( ) ) ; } setWithLastConsumed ( current , "wildcard" , true , "*" ) ; } } } } break ; case 2 : // InternalXtype . g : 952:6 : ( ( lv _ memberName _ 5_0 = ruleValidID ) ) { // InternalXtype . g : 952:6 : ( ( lv _ memberName _ 5_0 = ruleValidID ) ) // InternalXtype . g : 953:7 : ( lv _ memberName _ 5_0 = ruleValidID ) { // InternalXtype . g : 953:7 : ( lv _ memberName _ 5_0 = ruleValidID ) // InternalXtype . g : 954:8 : lv _ memberName _ 5_0 = ruleValidID { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXImportDeclarationAccess ( ) . getMemberNameValidIDParserRuleCall_1_0_3_1_0 ( ) ) ; } pushFollow ( FOLLOW_22 ) ; lv_memberName_5_0 = ruleValidID ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . 
getXImportDeclarationRule ( ) ) ; } set ( current , "memberName" , lv_memberName_5_0 , "org.eclipse.xtext.xbase.Xtype.ValidID" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; } } } break ; case 2 : // InternalXtype . g : 974:4 : ( ( ruleQualifiedName ) ) { // InternalXtype . g : 974:4 : ( ( ruleQualifiedName ) ) // InternalXtype . g : 975:5 : ( ruleQualifiedName ) { // InternalXtype . g : 975:5 : ( ruleQualifiedName ) // InternalXtype . g : 976:6 : ruleQualifiedName { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXImportDeclarationRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXImportDeclarationAccess ( ) . getImportedTypeJvmDeclaredTypeCrossReference_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_22 ) ; ruleQualifiedName ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } } break ; case 3 : // InternalXtype . g : 991:4 : ( ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) ) { // InternalXtype . g : 991:4 : ( ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) ) // InternalXtype . g : 992:5 : ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) { // InternalXtype . g : 992:5 : ( lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard ) // InternalXtype . g : 993:6 : lv _ importedNamespace _ 7_0 = ruleQualifiedNameWithWildcard { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXImportDeclarationAccess ( ) . getImportedNamespaceQualifiedNameWithWildcardParserRuleCall_1_2_0 ( ) ) ; } pushFollow ( FOLLOW_22 ) ; lv_importedNamespace_7_0 = ruleQualifiedNameWithWildcard ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . 
getXImportDeclarationRule ( ) ) ; } set ( current , "importedNamespace" , lv_importedNamespace_7_0 , "org.eclipse.xtext.xbase.Xtype.QualifiedNameWithWildcard" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; } // InternalXtype . g : 1011:3 : ( otherlv _ 8 = ' ; ' ) ? int alt19 = 2 ; int LA19_0 = input . LA ( 1 ) ; if ( ( LA19_0 == 27 ) ) { alt19 = 1 ; } switch ( alt19 ) { case 1 : // InternalXtype . g : 1012:4 : otherlv _ 8 = ' ; ' { otherlv_8 = ( Token ) match ( input , 27 , FOLLOW_2 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_8 , grammarAccess . getXImportDeclarationAccess ( ) . getSemicolonKeyword_2 ( ) ) ; } } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class Settings { /** * Sets a property value only if the value is not null . * @ param key the key for the property * @ param value the value for the property */ public void setIntIfNotNull ( @ NotNull final String key , @ Nullable final Integer value ) { } }
if ( null != value ) { setInt ( key , value ) ; }
public class TinkerforgeEndpoint { /** * Get Header - Parameter or alternative Configured URI Parameter Value */ public < T > T getValue ( Class < T > type , String parameter , Message message , T uriParameterValue ) { } }
if ( message == null && uriParameterValue != null ) { return uriParameterValue ; } T value = message . getHeader ( parameter , type ) ; if ( value == null ) { value = uriParameterValue ; } return value ;
public class ThemeSwitcher { /** * Looks are current theme and retrieves the style * for the given resId set in the theme . * @ param context to retrieve the resolved attribute * @ param styleResId for the given style * @ return resolved style resource Id */ public static int retrieveNavigationViewStyle ( Context context , int styleResId ) { } }
TypedValue outValue = resolveAttributeFromId ( context , styleResId ) ; return outValue . resourceId ;
public class ReplaceInListRepairer { /** * Repairs a local { @ link ReplaceInList } in relation to a remote { @ link ReplaceInList } command . * @ param toRepair * The local command to repair . * @ param repairAgainst * The remote command to repair against . * @ return The repaired command or an empty optional if repairing results in droping the command . */ public Optional < ReplaceInList > repairLocalCommand ( final ReplaceInList toRepair , final ReplaceInList repairAgainst ) { } }
if ( toRepair . getPosition ( ) == repairAgainst . getPosition ( ) ) { return Optional . empty ( ) ; } return Optional . of ( toRepair ) ;
public class XPathBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p > * @ param style of element * @ param < T > the element which calls this method * @ return this element */ @ SuppressWarnings ( "unchecked" ) public < T extends XPathBuilder > T setStyle ( final String style ) { } }
this . style = style ; return ( T ) this ;
public class ImageSizeImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setHRESOL ( Integer newHRESOL ) { } }
// EMF-generated setter for the HRESOL attribute: remembers the previous value
// and, when any adapters are attached, fires a SET notification carrying the
// old and new values so observers can react to the change.
Integer oldHRESOL = hresol ; hresol = newHRESOL ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . IMAGE_SIZE__HRESOL , oldHRESOL , hresol ) ) ;
public class ImageDrawing { /** * Drawing src bitmap to dest bitmap with applied mask . * @ param src source bitmap * @ param mask bitmap mask * @ param dest destination bitmap */ public static void drawMasked ( Bitmap src , Drawable mask , Bitmap dest ) { } }
// Convenience overload: delegates to the four-argument variant, supplying
// CLEAR_COLOR as the color argument (presumably the default background/clear
// color - confirm against the 4-arg overload's contract).
drawMasked ( src , mask , dest , CLEAR_COLOR ) ;
public class MolecularFormula { /** * Adds an Isotope to this MolecularFormula in a number of occurrences . * @ param isotope The isotope to be added to this MolecularFormula * @ param count The number of occurrences to add * @ see # addIsotope ( IIsotope ) */ @ Override public IMolecularFormula addIsotope ( IIsotope isotope , int count ) { } }
if ( count == 0 ) return this ; boolean flag = false ; for ( IIsotope thisIsotope : isotopes ( ) ) { if ( isTheSame ( thisIsotope , isotope ) ) { isotopes . put ( thisIsotope , isotopes . get ( thisIsotope ) + count ) ; flag = true ; break ; } } if ( ! flag ) { isotopes . put ( isotope , count ) ; } return this ;
public class Yank { /** * Executes a given INSERT SQL prepared statement matching the sqlKey String in a properties file * loaded via Yank . addSQLStatements ( . . . ) using the default connection pool . Returns the * auto - increment id of the inserted row . * @ param sqlKey The SQL Key found in a properties file corresponding to the desired SQL statement * value * @ param params The replacement parameters * @ return the auto - increment id of the inserted row , or null if no id is available * @ throws SQLStatementNotFoundException if an SQL statement could not be found for the given * sqlKey String */ public static Long insertSQLKey ( String sqlKey , Object [ ] params ) throws SQLStatementNotFoundException , YankSQLException { } }
// Convenience overload: delegates to the pool-aware variant using the
// default connection pool name.
return insertSQLKey ( YankPoolManager . DEFAULT_POOL_NAME , sqlKey , params ) ;
public class JnlpSlaveAgentProtocol4 { /** * Inject the { @ link IOHubProvider } * @ param hub the hub provider . */ @ Inject public void setHub ( IOHubProvider hub ) { } }
// Stores the injected provider and eagerly builds the JNLP4 protocol handler
// from it, wiring in the agent database, the shared remoting thread pool, the
// provider's IOHub and the SSL context.
// NOTE(review): assumes sslContext is already initialized when this setter is
// injected - confirm the injection ordering guarantees this.
this . hub = hub ; handler = new JnlpProtocol4Handler ( JnlpAgentReceiver . DATABASE , Computer . threadPoolForRemoting , hub . getHub ( ) , sslContext , false , true ) ;
public class TimerGroup { /** * Stop the named timer if the condition is true . */ private long stop ( long time , String name ) { } }
m_total . stop ( time ) ; TimerGroupItem timer = m_timers . get ( getName ( name ) ) ; long elapsedTime = 0 ; if ( timer != null ) { elapsedTime = timer . stop ( time ) ; } checkLog ( time ) ; return elapsedTime ;
public class PushService { /** * https : / / groups . google . com / forum / # ! topic / android - developers / H - DSQ4 - tiac * @ see android . app . Service # onTaskRemoved ( android . content . Intent ) */ @ TargetApi ( Build . VERSION_CODES . ICE_CREAM_SANDWICH ) @ Override public void onTaskRemoved ( Intent rootIntent ) { } }
// When the hosting task is swiped away and auto-wake-up is enabled, schedule
// an AlarmManager one-shot ~500ms in the future that restarts this service,
// so the push connection survives the task removal. super.onTaskRemoved is
// only invoked on ICS+ because the callback does not exist on earlier APIs.
LOGGER . d ( "try to restart service on task Removed" ) ; if ( isAutoWakeUp ) { Intent restartServiceIntent = new Intent ( getApplicationContext ( ) , this . getClass ( ) ) ; restartServiceIntent . setPackage ( getPackageName ( ) ) ; PendingIntent restartServicePendingIntent = PendingIntent . getService ( getApplicationContext ( ) , 1 , restartServiceIntent , PendingIntent . FLAG_UPDATE_CURRENT ) ; AlarmManager alarmService = ( AlarmManager ) getApplicationContext ( ) . getSystemService ( Context . ALARM_SERVICE ) ; alarmService . set ( AlarmManager . ELAPSED_REALTIME , SystemClock . elapsedRealtime ( ) + 500 , restartServicePendingIntent ) ; } if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . ICE_CREAM_SANDWICH ) { super . onTaskRemoved ( rootIntent ) ; }
public class Humanize { /** * Sort of poor man ' s transliteration , i . e . normalizes and strips * diacritical marks . * < table border = " 0 " cellspacing = " 0 " cellpadding = " 3 " width = " 100 % " > * < tr > * < th class = " colFirst " > Input < / th > * < th class = " colLast " > Output < / th > * < / tr > * < tr > * < td > " J ' étudie le français " < / td > * < td > " J ' etudie le francais " < / td > * < / tr > * < tr > * < td > " Lo siento , no hablo español . " < / td > * < td > " Lo siento , no hablo espanol . " < / td > * < / tr > * < / table > * @ param text * The text to be simplified . * @ return simplified text . */ @ Expose public static String simplify ( final String text ) { } }
String normalized = java . text . Normalizer . normalize ( text , java . text . Normalizer . Form . NFD ) ; return COMB_DIACRITICAL . matcher ( normalized ) . replaceAll ( "" ) ;
public class DelayedExecutor { /** * Executes delayed invocations * @ param target * @ throws Throwable From one of the called methods . */ public void execute ( T target ) throws Throwable { } }
// Replays every queued invocation against the target, in queue order.
// An InvocationTargetException is unwrapped so the caller sees the original
// exception thrown by the invoked method (hence "throws Throwable"); any
// other reflective failure is a programming error and is wrapped in a
// RuntimeException. Note: InvocationTargetException must be caught before
// ReflectiveOperationException, as it is a subclass of it.
try { for ( Invokation invokation : queue ) { invokation . invoke ( target ) ; } } catch ( InvocationTargetException ex ) { throw ex . getCause ( ) ; } catch ( ReflectiveOperationException ex ) { throw new RuntimeException ( ex ) ; }
public class UsersApi { /** * Log out the agent specified by the dbid . * Log out the agent specified by the dbid . * @ param dbid The dbid of the agent . ( required ) * @ param supervisorPlaceData Request parameters . ( optional ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse supervisorRemotePlaceOperation ( String dbid , SupervisorPlaceData supervisorPlaceData ) throws ApiException { } }
ApiResponse < ApiSuccessResponse > resp = supervisorRemotePlaceOperationWithHttpInfo ( dbid , supervisorPlaceData ) ; return resp . getData ( ) ;
public class WinletDefaultFormattingConversionService { /** * Formats the given property of the object if a format is applicable , otherwise falls back to toString . * @ param bw property accessor for obj * @ param obj the bean whose property is formatted * @ param prop the property name * @ return the formatted string , or null for null / array values */ public static String format ( ConfigurablePropertyAccessor bw , Object obj , String prop ) { } }
// Lazily records (via canFormat) whether this class/property pair has an
// applicable formatter, keyed by "<class name>_<prop>".
// NOTE(review): assumes canFormat(...) always populates the cache for this
// key; otherwise cache.get(key) below would NPE - confirm.
// Null and array values yield null; values without a formatter fall back to
// toString(); otherwise the value is converted to String through the shared
// conversion service using the property's TypeDescriptor.
String key = obj . getClass ( ) . getName ( ) + "_" + prop ; if ( ! cache . containsKey ( key ) ) canFormat ( bw , obj , prop ) ; Object val = bw . getPropertyValue ( prop ) ; if ( val == null ) return null ; if ( val . getClass ( ) . isArray ( ) ) return null ; if ( ! cache . get ( key ) ) return val . toString ( ) ; TypeDescriptor td = bw . getPropertyTypeDescriptor ( prop ) ; val = get ( ) . convert ( val , td , TypeDescriptor . valueOf ( String . class ) ) ; return val == null ? null : val . toString ( ) ;
public class CommandLineParserUtilities { /** * Return a list of all { @ link Field } in the class { @ code clazz } and its base classes . * @ param clazz class to interrogate for { @ link Field } * @ return list of all { @ link Field } in the class { @ code clazz } and its base classes */ public static List < Field > getAllFields ( Class < ? > clazz ) { } }
final List < Field > ret = new ArrayList < > ( ) ; do { ret . addAll ( Arrays . asList ( clazz . getDeclaredFields ( ) ) ) ; clazz = clazz . getSuperclass ( ) ; } while ( clazz != null ) ; return ret ;
public class Instructions { /** * Utility method to merge instructions from { @ code props2 } into the { @ code props1 } . The instructions * from { @ code props2 } override the instructions from { @ code props1 } ( when both contain the same instruction ) * @ param props1 the first set of instructions * @ param props2 the second set of instructions * @ return the new set of instructions containing the instructions from { @ code props2 } merged into { @ code props1 } . */ public static Properties mergeAndOverrideExisting ( Properties props1 , Properties props2 ) { } }
Properties properties = new Properties ( ) ; properties . putAll ( props1 ) ; properties . putAll ( props2 ) ; return properties ;
public class ClientSessionManager { /** * Expires the manager . * @ return A completable future to be completed once the session has been expired . */ public CompletableFuture < Void > expire ( ) { } }
// Performs the expiration on the session's executor thread: cancels the
// keep-alive task (if one is scheduled) and moves the session state to
// EXPIRED. The returned future completes only after the state change has
// been applied on that executor.
CompletableFuture < Void > future = new CompletableFuture < > ( ) ; context . executor ( ) . execute ( ( ) -> { if ( keepAlive != null ) keepAlive . cancel ( ) ; state . setState ( Session . State . EXPIRED ) ; future . complete ( null ) ; } ) ; return future ;
public class GroupMemberImpl { /** * Returns an < code > Iterator < / code > over this < code > IGroupMember ' s < / code > parent groups . * Synchronize the collection of keys with adds and removes . * @ return Iterator */ @ Override public Set < IEntityGroup > getParentGroups ( ) throws GroupsException { } }
// Returns this member's parent groups, lazily computing the set and caching
// it under the member's EntityIdentifier so later calls hit the cache.
// NOTE(review): the get-then-put on parentGroupsCache is not atomic;
// concurrent callers may build the set twice (last write wins) - confirm
// that is acceptable for this cache.
final EntityIdentifier cacheKey = getUnderlyingEntityIdentifier ( ) ; Element element = parentGroupsCache . get ( cacheKey ) ; if ( element == null ) { final Set < IEntityGroup > groups = buildParentGroupsSet ( ) ; element = new Element ( cacheKey , groups ) ; parentGroupsCache . put ( element ) ; } @ SuppressWarnings ( "unchecked" ) final Set < IEntityGroup > rslt = ( Set < IEntityGroup > ) element . getObjectValue ( ) ; return rslt ;
public class ClutoSparseFileIterator {
    /**
     * Reads a line and splits it into [col value] pairs.
     *
     * @throws IllegalArgumentException If there are no more lines to read.
     */
    private String[] readLine() throws IOException {
        final String nextLine = reader.readLine();
        if (nextLine == null) {
            // Running out of lines mid-matrix means the file header promised
            // more rows than the file actually contains.
            throw new IllegalArgumentException("The matrix file is improperly formatted");
        }
        // Tokens are separated by arbitrary runs of whitespace.
        return nextLine.split("\\s+");
    }
}