signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ClientConfiguration { /** * Configure the list of authentication methods that should be used when authenticating against an HTTP proxy , in the order * they should be attempted . Any methods not included in this list will not be attempted . If one authentication method fails , * the next method will be attempted , until a working method is found ( or all methods have been attempted ) . * < p > Setting this value to null indicates using the default behavior , which is to try all authentication methods in an * unspecified order . < / p > * @ param proxyAuthenticationMethods The proxy authentication methods to be attempted , in the order they should be attempted . */ public void setProxyAuthenticationMethods ( List < ProxyAuthenticationMethod > proxyAuthenticationMethods ) { } }
if ( proxyAuthenticationMethods == null ) { this . proxyAuthenticationMethods = null ; } else { ValidationUtils . assertNotEmpty ( proxyAuthenticationMethods , "proxyAuthenticationMethods" ) ; this . proxyAuthenticationMethods = Collections . unmodifiableList ( new ArrayList < ProxyAuthenticationMethod > ( proxyAuthenticationMethods ) ) ; }
public class SAML2AttributeNameToIdMapperService { /** * Returns the mapping between attribute names and their Shibboleth ID : s . * @ return a mapping */ private Map < String , String > getMapping ( ) { } }
if ( this . attributesMapping != null && this . lastReload != null && this . lastReload . equals ( this . attributeResolverService . getLastSuccessfulReloadInstant ( ) ) ) { return this . attributesMapping ; } // Reload . ServiceableComponent < AttributeResolver > component = null ; Map < String , String > am = null ; try { // Get date before we get the component . That way we ' ll not leak changes . final DateTime when = this . attributeResolverService . getLastSuccessfulReloadInstant ( ) ; component = this . attributeResolverService . getServiceableComponent ( ) ; if ( null == component ) { if ( ! captiveServiceReloadFailed ) { logger . error ( "Invalid AttributeResolver configuration" ) ; } captiveServiceReloadFailed = true ; } else { final AttributeResolver attributeResolver = component . getComponent ( ) ; am = new HashMap < > ( ) ; Map < String , AttributeDefinition > map = attributeResolver . getAttributeDefinitions ( ) ; for ( Map . Entry < String , AttributeDefinition > entry : map . entrySet ( ) ) { String name = null ; Set < AttributeEncoder < ? > > encoders = entry . getValue ( ) . getAttributeEncoders ( ) ; for ( AttributeEncoder < ? > encoder : encoders ) { if ( encoder instanceof SAML2AttributeEncoder ) { name = ( ( SAML2AttributeEncoder < ? > ) encoder ) . getName ( ) ; if ( name != null ) { break ; } } } if ( name != null ) { logger . debug ( "Adding mapping between SAML2 attribute '{}' and id '{}'" , name , entry . getKey ( ) ) ; am . put ( name , entry . getKey ( ) ) ; } else { logger . debug ( "No mapping to SAML2 attribute for attribute id '{}'" , entry . getKey ( ) ) ; } } captiveServiceReloadFailed = false ; lastReload = when ; } } finally { if ( null != component ) { component . unpinComponent ( ) ; } } this . attributesMapping = am ; return am ;
public class MerlinReader { /** * Read relation data . */ private void processDependencies ( ) throws SQLException { } }
List < Row > rows = getRows ( "select * from zdependency where zproject=?" , m_projectID ) ; for ( Row row : rows ) { Task nextTask = m_project . getTaskByUniqueID ( row . getInteger ( "ZNEXTACTIVITY_" ) ) ; Task prevTask = m_project . getTaskByUniqueID ( row . getInteger ( "ZPREVIOUSACTIVITY_" ) ) ; Duration lag = row . getDuration ( "ZLAG_" ) ; RelationType type = row . getRelationType ( "ZTYPE" ) ; Relation relation = nextTask . addPredecessor ( prevTask , type , lag ) ; relation . setUniqueID ( row . getInteger ( "Z_PK" ) ) ; }
public class CompoundServiceFilter { /** * Calls both filters ' successful ( ) method , ignoring anything thrown . */ @ Override public void successful ( DataBinder parameters ) { } }
try { first . successful ( parameters ) ; } catch ( Throwable t ) { } try { second . successful ( parameters ) ; } catch ( Throwable t ) { }
public class NtlmPasswordAuthentication { /** * Generate the Unicode MD4 hash for the password associated with these credentials . */ static public byte [ ] getNTLMResponse ( String password , byte [ ] challenge ) { } }
byte [ ] uni = null ; byte [ ] p21 = new byte [ 21 ] ; byte [ ] p24 = new byte [ 24 ] ; try { uni = password . getBytes ( SmbConstants . UNI_ENCODING ) ; } catch ( UnsupportedEncodingException uee ) { if ( log . level > 0 ) uee . printStackTrace ( log ) ; } MD4 md4 = new MD4 ( ) ; md4 . update ( uni ) ; try { md4 . digest ( p21 , 0 , 16 ) ; } catch ( Exception ex ) { if ( log . level > 0 ) ex . printStackTrace ( log ) ; } E ( p21 , challenge , p24 ) ; return p24 ;
public class AbstractTrafficShapingHandler { /** * < p > Note the change will be taken as best effort , meaning * that all already scheduled traffics will not be * changed , but only applied to new traffics . < / p > * < p > So the expected usage of this method is to be used not too often , * accordingly to the traffic shaping configuration . < / p > * @ param writeLimit the writeLimit to set */ public void setWriteLimit ( long writeLimit ) { } }
this . writeLimit = writeLimit ; if ( trafficCounter != null ) { trafficCounter . resetAccounting ( TrafficCounter . milliSecondFromNano ( ) ) ; }
public class TypeUtility { /** * Returns a string with type parameters replaced with wildcards . This is slightly different from { @ link Types # erasure ( javax . lang . model . type . TypeMirror ) } , which removes all * type parameter data . * For instance , if there is a field with type List & lt ; String & gt ; , this returns a string List & lt ; ? & gt ; . * @ param declaredType * the declared type * @ return the canonical type name */ private static String getCanonicalTypeName ( DeclaredType declaredType ) { } }
List < ? extends TypeMirror > typeArguments = declaredType . getTypeArguments ( ) ; if ( ! typeArguments . isEmpty ( ) ) { StringBuilder typeString = new StringBuilder ( declaredType . asElement ( ) . toString ( ) ) ; typeString . append ( '<' ) ; for ( int i = 0 ; i < typeArguments . size ( ) ; i ++ ) { if ( i > 0 ) { typeString . append ( ',' ) ; } typeString . append ( '?' ) ; } typeString . append ( '>' ) ; return typeString . toString ( ) ; } else { return declaredType . toString ( ) ; }
public class XmpSchema { /** * Processes a property * @ param buf * @ param p */ protected void process ( StringBuffer buf , Object p ) { } }
buf . append ( '<' ) ; buf . append ( p ) ; buf . append ( '>' ) ; buf . append ( this . get ( p ) ) ; buf . append ( "</" ) ; buf . append ( p ) ; buf . append ( '>' ) ;
public class DefaultCommandRegistry { /** * { @ inheritDoc } * @ deprecated */ public ActionCommand getActionCommand ( String commandId ) { } }
if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Attempting to retrieve ActionCommand with id [" + commandId + "] from the command registry." ) ; } Object command = getCommand ( commandId , ActionCommand . class ) ; return ( ActionCommand ) command ;
public class ZFSInstaller { /** * Called from the confirmation screen to actually initiate the migration . */ @ RequirePOST public void doStart ( StaplerRequest req , StaplerResponse rsp , @ QueryParameter String username , @ QueryParameter String password ) throws ServletException , IOException { } }
Jenkins hudson = Jenkins . getInstance ( ) ; hudson . checkPermission ( Jenkins . ADMINISTER ) ; final String datasetName ; ByteArrayOutputStream log = new ByteArrayOutputStream ( ) ; StreamTaskListener listener = new StreamTaskListener ( log ) ; try { datasetName = createZfsFileSystem ( listener , username , password ) ; } catch ( Exception e ) { Functions . printStackTrace ( e , listener . error ( e . getMessage ( ) ) ) ; if ( e instanceof ZFSException ) { ZFSException ze = ( ZFSException ) e ; if ( ze . getCode ( ) == ErrorCode . EZFS_PERM ) { // permission problem . ask the user to give us the root password req . setAttribute ( "message" , log . toString ( ) ) ; rsp . forward ( this , "askRootPassword" , req ) ; return ; } } // for other kinds of problems , report and bail out req . setAttribute ( "pre" , true ) ; sendError ( log . toString ( ) , req , rsp ) ; return ; } // file system creation successful , so restart hudson . servletContext . setAttribute ( "app" , new HudsonIsRestarting ( ) ) ; // redirect the user to the manage page rsp . sendRedirect2 ( req . getContextPath ( ) + "/manage" ) ; // asynchronously restart , so that we can give a bit of time to the browser to load " restarting . . . " screen . new Thread ( "restart thread" ) { @ Override public void run ( ) { try { Thread . sleep ( 5000 ) ; // close all descriptors on exec except stdin , out , err int sz = LIBC . getdtablesize ( ) ; for ( int i = 3 ; i < sz ; i ++ ) { int flags = LIBC . fcntl ( i , F_GETFD ) ; if ( flags < 0 ) continue ; LIBC . fcntl ( i , F_SETFD , flags | FD_CLOEXEC ) ; } // re - exec with the system property to indicate where to migrate the data to . // the 2nd phase is implemented in the migrate method . JavaVMArguments args = JavaVMArguments . current ( ) ; args . setSystemProperty ( ZFSInstaller . class . getName ( ) + ".migrate" , datasetName ) ; Daemon . selfExec ( args ) ; } catch ( InterruptedException | IOException e ) { LOGGER . log ( Level . 
SEVERE , "Restart failed" , e ) ; } } } . start ( ) ;
public class GenericUrl { /** * Returns the raw encoded path computed from the { @ link # pathParts } . * @ return raw encoded path computed from the { @ link # pathParts } or { @ code null } if { @ link * # pathParts } is { @ code null } */ public String getRawPath ( ) { } }
List < String > pathParts = this . pathParts ; if ( pathParts == null ) { return null ; } StringBuilder buf = new StringBuilder ( ) ; appendRawPathFromParts ( buf ) ; return buf . toString ( ) ;
public class CRestBuilder { /** * < p > Adds given property to the { @ link org . codegist . crest . CRestConfig } that will be passed to all < b > CRest < / b > components . < / p > * < p > Note that this property can be used to override defaut < b > CRest < / b > ' s MethodConfig and ParamConfig values when none are provided through annotations . < / p > * @ param name property name * @ param value property value * @ return current builder * @ see org . codegist . crest . CRestConfig * @ see org . codegist . crest . config . MethodConfig * @ see org . codegist . crest . config . ParamConfig */ public CRestBuilder property ( String name , Object value ) { } }
return addProperties ( singletonMap ( name , value ) ) ;
public class MediathekWdr { @ Override public synchronized void addToList ( ) { } }
clearLists ( ) ; meldungStart ( ) ; fillLists ( ) ; if ( Config . getStop ( ) ) { meldungThreadUndFertig ( ) ; } else if ( letterPageUrls . isEmpty ( ) && dayUrls . isEmpty ( ) ) { meldungThreadUndFertig ( ) ; } else { meldungAddMax ( letterPageUrls . size ( ) + dayUrls . size ( ) ) ; startLetterPages ( ) ; startDayPages ( ) ; addFilms ( ) ; meldungThreadUndFertig ( ) ; }
public class GenericInfoUtils { /** * When building inlying context , target type may be inner class , and if root context contains owner type * then we can assume that it ' s known more specific generics may be used . This is not correct in general , * as inner class may be created inside different class , but in most cases inner classes are used within * outer class and the chance that different outer class hierarchies will interact are quite low . * Storing all types , not present in target class hierarchy ( to avoid affecting actual generics resolution ) * @ param type target ( inlying ) type * @ param info root context generics info ( possibly outer ) * @ return possible owner classes , not present in target type hierarchy */ private static Map < Class < ? > , LinkedHashMap < String , Type > > usePossiblyOwnerGenerics ( final Class < ? > type , final GenericsInfo info ) { } }
final Map < Class < ? > , LinkedHashMap < String , Type > > res = new HashMap < Class < ? > , LinkedHashMap < String , Type > > ( ) ; // use only types , not included in target hierarchy for ( Class < ? > root : info . getComposingTypes ( ) ) { if ( ! root . isAssignableFrom ( type ) ) { res . put ( root , ( LinkedHashMap < String , Type > ) info . getTypeGenerics ( root ) ) ; } } return res ;
public class DdlUtils { /** * The connection is commited in this method but not closed . */ public static void createSchema ( Connection connection , String dialect , boolean createSchemaMigrations ) { } }
if ( createSchemaMigrations ) { executeScript ( connection , "org/sonar/db/version/schema_migrations-" + dialect + ".ddl" ) ; } executeScript ( connection , "org/sonar/db/version/schema-" + dialect + ".ddl" ) ; executeScript ( connection , "org/sonar/db/version/rows-" + dialect + ".sql" ) ;
public class AbstractAuthenticationStrategy { /** * Determines a common < code > User < / code > corresponding to the given list of e - mail addresses . If none of the e - mail addresses are in * use , returns < code > null < / code > . If the e - mail addresses are associated to multiple user accounts , an < code > SmvcRuntimeException < / code > * is thrown . * @ param requestContext Request context * @ param emails The list of e - mail addresses to validate * @ return The common < code > User < / code > corresponding to the given list of e - mail addresses , or < code > null < / code > if the addresses are * not in use * @ throws SmvcRuntimeException If the e - mail addresses are associated to multiple user accounts */ private User resolveUser ( RequestContext requestContext , List < String > emails ) { } }
User user = null ; UserEmailDAO userEmailDAO = new UserEmailDAO ( ) ; for ( String email : emails ) { UserEmail userEmail = userEmailDAO . findByAddress ( email ) ; if ( userEmail != null ) { if ( user == null ) { user = userEmail . getUser ( ) ; } else if ( ! user . getId ( ) . equals ( userEmail . getUser ( ) . getId ( ) ) ) { Messages messages = Messages . getInstance ( ) ; Locale locale = requestContext . getRequest ( ) . getLocale ( ) ; throw new SmvcRuntimeException ( EdelfoiStatusCode . LOGIN_MULTIPLE_ACCOUNTS , messages . getText ( locale , "exception.1023.loginMultipleAccounts" ) ) ; } } } return user ;
public class DirectBufferOutputStream { /** * Write a byte [ ] to the buffer . * @ param srcBytes to write * @ param srcOffset at which to begin reading bytes from the srcBytes . * @ param length of the srcBytes to read . * @ throws IllegalStateException if insufficient capacity remains in the buffer . */ public void write ( final byte [ ] srcBytes , final int srcOffset , final int length ) { } }
final long resultingOffset = position + ( ( long ) length ) ; if ( resultingOffset > this . length ) { throw new IllegalStateException ( "insufficient capacity in the buffer" ) ; } buffer . putBytes ( offset + position , srcBytes , srcOffset , length ) ; position += length ;
public class Try { /** * Flats nested { @ link Try } of { @ link Try } into flatten one . * @ param nestedTry nested try to flatten * @ param < U > computation type * @ return flatten Try */ public static < U > Try < U > flatten ( final Try < ? extends Try < ? extends U > > nestedTry ) { } }
if ( nestedTry . isFailure ( ) ) { return fromError ( nestedTry . getError ( ) ) ; } return nestedTry . getValue ( ) . map ( identity ( ) ) ;
public class MbeanImplCodeGen { /** * Output Constructor * @ param def definition * @ param out Writer * @ param indent space number * @ throws IOException ioException */ void writeConstructor ( Definition def , Writer out , int indent ) throws IOException { } }
writeSimpleMethodSignature ( out , indent , " * Default constructor" , "public " + getClassName ( def ) + "()" ) ; writeLeftCurlyBracket ( out , indent ) ; writeWithIndent ( out , indent + 1 , "this.mbeanServer = null;\n" ) ; writeWithIndent ( out , indent + 1 , "this.objectName = \"" + def . getDefaultValue ( ) + ",class=HelloWorld\";\n" ) ; writeWithIndent ( out , indent + 1 , "this.registered = false;\n\n" ) ; writeRightCurlyBracket ( out , indent ) ; writeEol ( out ) ;
public class CPAttachmentFileEntryUtil { /** * Returns the cp attachment file entry where classNameId = & # 63 ; and classPK = & # 63 ; and fileEntryId = & # 63 ; or returns < code > null < / code > if it could not be found . Uses the finder cache . * @ param classNameId the class name ID * @ param classPK the class pk * @ param fileEntryId the file entry ID * @ return the matching cp attachment file entry , or < code > null < / code > if a matching cp attachment file entry could not be found */ public static CPAttachmentFileEntry fetchByC_C_F ( long classNameId , long classPK , long fileEntryId ) { } }
return getPersistence ( ) . fetchByC_C_F ( classNameId , classPK , fileEntryId ) ;
public class VdmEvaluationAction { /** * IVdmEvaluationListener */ public void evaluationComplete ( IVdmEvaluationResult result ) { } }
// if plug - in has shutdown , ignore - see bug # 8693 if ( VdmDebugPlugin . getDefault ( ) == null ) { return ; } final IVdmValue value = result . getValue ( ) ; if ( result . hasErrors ( ) || value != null ) { final Display display = VdmDebugPlugin . getStandardDisplay ( ) ; if ( display . isDisposed ( ) ) { return ; } // Each action should implement this method for own purposes displayResult ( result ) ; }
public class FieldInitializer { /** * initFromProperties - initializes fields ( static and non - static ) in an Object based * on system properties . For example , given Object o of class com . company . Foo * with field cacheSize . The following system property would change the field * - Dcom . company . Foo . cacheSize = 100 */ static public void initFromSystemProperties ( Object o , Properties prop ) { } }
updateFieldsFromProperties ( o . getClass ( ) , o , prop ) ;
public class TailOf { /** * Copy buffer to response for read count smaller then buffer size . * @ param buffer The buffer array * @ param response The response array * @ param num Number of bytes in response array from previous read * @ param read Number of bytes read in the buffer * @ return New count of bytes in the response array * @ checkstyle ParameterNumberCheck ( 3 lines ) */ private int copyPartial ( final byte [ ] buffer , final byte [ ] response , final int num , final int read ) { } }
final int result ; if ( num > 0 ) { System . arraycopy ( response , read , response , 0 , this . count - read ) ; System . arraycopy ( buffer , 0 , response , this . count - read , read ) ; result = this . count ; } else { System . arraycopy ( buffer , 0 , response , 0 , read ) ; result = read ; } return result ;
public class CryptoKey { /** * < code > . google . privacy . dlp . v2 . UnwrappedCryptoKey unwrapped = 2 ; < / code > */ public com . google . privacy . dlp . v2 . UnwrappedCryptoKeyOrBuilder getUnwrappedOrBuilder ( ) { } }
if ( sourceCase_ == 2 ) { return ( com . google . privacy . dlp . v2 . UnwrappedCryptoKey ) source_ ; } return com . google . privacy . dlp . v2 . UnwrappedCryptoKey . getDefaultInstance ( ) ;
public class CacheLoader { /** * Loads multiple values to the cache . * < p > From inside this method it is illegal to call methods on the same cache . This * may cause a deadlock . * < p > The method is provided to complete the API . At the moment cache2k is not * using it . Please see the road map . * @ param keys set of keys for the values to be loaded * @ param executor an executor for concurrent loading * @ return The loaded values . A key may map to { @ code null } if the cache permits { @ code null } values . * @ throws Exception Unhandled exception from the loader . Exceptions are suppressed or * wrapped and rethrown via a { @ link CacheLoaderException } . * If an exception happens the cache may retry the load with the * single value load method . */ public Map < K , V > loadAll ( Iterable < ? extends K > keys , Executor executor ) throws Exception { } }
throw new UnsupportedOperationException ( ) ;
public class AbstractAmazonDynamoDBAsync { /** * Simplified method form for invoking the Scan operation . * @ see # scanAsync ( ScanRequest ) */ @ Override public java . util . concurrent . Future < ScanResult > scanAsync ( String tableName , java . util . List < String > attributesToGet ) { } }
return scanAsync ( new ScanRequest ( ) . withTableName ( tableName ) . withAttributesToGet ( attributesToGet ) ) ;
public class DiffBuilder { /** * Compare the Test - XML { @ link # withTest ( Object ) } with the Control - XML { @ link # compare ( Object ) } and return the * collected differences in a { @ link Diff } object . */ public Diff build ( ) { } }
final DOMDifferenceEngine d = new DOMDifferenceEngine ( ) ; final CollectResultsListener collectResultsListener = new CollectResultsListener ( comparisonResultsToCheck ) ; d . addDifferenceListener ( collectResultsListener ) ; if ( nodeMatcher != null ) { d . setNodeMatcher ( nodeMatcher ) ; } d . setDifferenceEvaluator ( differenceEvaluator ) ; d . setComparisonController ( comparisonController ) ; for ( ComparisonListener comparisonListener : comparisonListeners ) { d . addComparisonListener ( comparisonListener ) ; } for ( ComparisonListener comparisonListener : differenceListeners ) { d . addDifferenceListener ( comparisonListener ) ; } if ( namespaceContext != null ) { d . setNamespaceContext ( namespaceContext ) ; } if ( attributeFilter != null ) { d . setAttributeFilter ( attributeFilter ) ; } if ( nodeFilter != null ) { d . setNodeFilter ( nodeFilter ) ; } if ( documentBuilderFactory != null ) { d . setDocumentBuilderFactory ( documentBuilderFactory ) ; } d . compare ( wrap ( controlSource ) , wrap ( testSource ) ) ; return formatter == null ? new Diff ( controlSource , testSource , collectResultsListener . getDifferences ( ) ) : new Diff ( controlSource , testSource , formatter , collectResultsListener . getDifferences ( ) ) ;
public class InstrumentedExecutors { /** * Creates a single - threaded instrumented executor that can schedule commands * to run after a given delay , or to execute periodically . ( Note * however that if this single thread terminates due to a failure * during execution prior to shutdown , a new one will take its * place if needed to execute subsequent tasks . ) Tasks are * guaranteed to execute sequentially , and no more than one task * will be active at any given time . Unlike the otherwise * equivalent { @ code newScheduledThreadPool ( 1 , threadFactory ) } * the returned executor is guaranteed not to be reconfigurable to * use additional threads . * @ param threadFactory the factory to use when creating new threads * @ param registry the { @ link MetricRegistry } that will contain the metrics . * @ return a newly created scheduled executor * @ throws NullPointerException if threadFactory is null * @ see Executors # newSingleThreadExecutor ( ThreadFactory ) */ public static InstrumentedScheduledExecutorService newSingleThreadScheduledExecutor ( ThreadFactory threadFactory , MetricRegistry registry ) { } }
return new InstrumentedScheduledExecutorService ( Executors . newSingleThreadScheduledExecutor ( threadFactory ) , registry ) ;
public class MessageToByteEncoder { /** * Allocate a { @ link ByteBuf } which will be used as argument of { @ link # encode ( ChannelHandlerContext , I , ByteBuf ) } . * Sub - classes may override this method to return { @ link ByteBuf } with a perfect matching { @ code initialCapacity } . */ protected ByteBuf allocateBuffer ( ChannelHandlerContext ctx , @ SuppressWarnings ( "unused" ) I msg , boolean preferDirect ) throws Exception { } }
if ( preferDirect ) { return ctx . alloc ( ) . ioBuffer ( ) ; } else { return ctx . alloc ( ) . heapBuffer ( ) ; }
public class AlignmentTools { /** * After the alignment changes ( optAln , optLen , blockNum , at a minimum ) , * many other properties which depend on the superposition will be invalid . * This method re - runs a rigid superposition over the whole alignment * and repopulates the required properties , including RMSD ( TotalRMSD ) and * TM - Score . * @ param afpChain * @ param ca1 * @ param ca2 Second set of ca atoms . Will be modified based on the superposition * @ throws StructureException * @ see { @ link CECalculator # calc _ rmsd ( Atom [ ] , Atom [ ] , int , boolean ) } * contains much of the same code , but stores results in a CECalculator * instance rather than an AFPChain */ public static void updateSuperposition ( AFPChain afpChain , Atom [ ] ca1 , Atom [ ] ca2 ) throws StructureException { } }
// Update ca information , because the atom array might also be changed afpChain . setCa1Length ( ca1 . length ) ; afpChain . setCa2Length ( ca2 . length ) ; // We need this to get the correct superposition int [ ] focusRes1 = afpChain . getFocusRes1 ( ) ; int [ ] focusRes2 = afpChain . getFocusRes2 ( ) ; if ( focusRes1 == null ) { focusRes1 = new int [ afpChain . getCa1Length ( ) ] ; afpChain . setFocusRes1 ( focusRes1 ) ; } if ( focusRes2 == null ) { focusRes2 = new int [ afpChain . getCa2Length ( ) ] ; afpChain . setFocusRes2 ( focusRes2 ) ; } if ( afpChain . getNrEQR ( ) == 0 ) return ; // create new arrays for the subset of atoms in the alignment . Atom [ ] ca1aligned = new Atom [ afpChain . getOptLength ( ) ] ; Atom [ ] ca2aligned = new Atom [ afpChain . getOptLength ( ) ] ; fillAlignedAtomArrays ( afpChain , ca1 , ca2 , ca1aligned , ca2aligned ) ; // Superimpose the two structures in correspondance to the new alignment Matrix4d trans = SuperPositions . superpose ( Calc . atomsToPoints ( ca1aligned ) , Calc . atomsToPoints ( ca2aligned ) ) ; Matrix matrix = Matrices . getRotationJAMA ( trans ) ; Atom shift = Calc . getTranslationVector ( trans ) ; Matrix [ ] blockMxs = new Matrix [ afpChain . getBlockNum ( ) ] ; Arrays . fill ( blockMxs , matrix ) ; afpChain . setBlockRotationMatrix ( blockMxs ) ; Atom [ ] blockShifts = new Atom [ afpChain . getBlockNum ( ) ] ; Arrays . fill ( blockShifts , shift ) ; afpChain . setBlockShiftVector ( blockShifts ) ; for ( Atom a : ca2aligned ) { Calc . rotate ( a , matrix ) ; Calc . shift ( a , shift ) ; } // Calculate the RMSD and TM score for the new alignment double rmsd = Calc . rmsd ( ca1aligned , ca2aligned ) ; double tmScore = Calc . getTMScore ( ca1aligned , ca2aligned , ca1 . length , ca2 . length ) ; afpChain . setTotalRmsdOpt ( rmsd ) ; afpChain . setTMScore ( tmScore ) ; int [ ] blockLens = afpChain . getOptLen ( ) ; int [ ] [ ] [ ] optAln = afpChain . 
getOptAln ( ) ; // Calculate the RMSD and TM score for every block of the new alignment double [ ] blockRMSD = new double [ afpChain . getBlockNum ( ) ] ; double [ ] blockScore = new double [ afpChain . getBlockNum ( ) ] ; for ( int k = 0 ; k < afpChain . getBlockNum ( ) ; k ++ ) { // Create the atom arrays corresponding to the aligned residues in the block Atom [ ] ca1block = new Atom [ afpChain . getOptLen ( ) [ k ] ] ; Atom [ ] ca2block = new Atom [ afpChain . getOptLen ( ) [ k ] ] ; int position = 0 ; for ( int i = 0 ; i < blockLens [ k ] ; i ++ ) { int pos1 = optAln [ k ] [ 0 ] [ i ] ; int pos2 = optAln [ k ] [ 1 ] [ i ] ; Atom a1 = ca1 [ pos1 ] ; Atom a2 = ( Atom ) ca2 [ pos2 ] . clone ( ) ; ca1block [ position ] = a1 ; ca2block [ position ] = a2 ; position ++ ; } if ( position != afpChain . getOptLen ( ) [ k ] ) { logger . warn ( "AFPChainScorer getTMScore: Problems reconstructing block alignment! nr of loaded atoms is " + position + " but should be " + afpChain . getOptLen ( ) [ k ] ) ; // we need to resize the array , because we allocated too many atoms earlier on . ca1block = ( Atom [ ] ) resizeArray ( ca1block , position ) ; ca2block = ( Atom [ ] ) resizeArray ( ca2block , position ) ; } // Superimpose the two block structures Matrix4d transb = SuperPositions . superpose ( Calc . atomsToPoints ( ca1block ) , Calc . atomsToPoints ( ca2block ) ) ; blockMxs [ k ] = Matrices . getRotationJAMA ( trans ) ; blockShifts [ k ] = Calc . getTranslationVector ( trans ) ; Calc . transform ( ca2block , transb ) ; // Calculate the RMSD and TM score for the block double rmsdb = Calc . rmsd ( ca1block , ca2block ) ; double tmScoreb = Calc . getTMScore ( ca1block , ca2block , ca1 . length , ca2 . length ) ; blockRMSD [ k ] = rmsdb ; blockScore [ k ] = tmScoreb ; } afpChain . setOptRmsd ( blockRMSD ) ; afpChain . setBlockRmsd ( blockRMSD ) ; afpChain . setBlockScore ( blockScore ) ;
public class GeneratorXMLDatabaseConnection { /** * Process a ' dataset ' Element in the XML stream . * @ param gen Generator * @ param cur Current document nod */ private void processElementDataset ( GeneratorMain gen , Node cur ) { } }
// * * * get parameters String seedstr = ( ( Element ) cur ) . getAttribute ( ATTR_SEED ) ; if ( clusterRandom != RandomFactory . DEFAULT && seedstr != null && seedstr . length ( ) > 0 ) { clusterRandom = new RandomFactory ( ( long ) ( ParseUtil . parseIntBase10 ( seedstr ) * sizescale ) ) ; } String testmod = ( ( Element ) cur ) . getAttribute ( ATTR_TEST ) ; if ( testmod != null && testmod . length ( ) > 0 ) { testAgainstModel = Boolean . valueOf ( ParseUtil . parseIntBase10 ( testmod ) != 0 ) ; } // TODO : check for unknown attributes . XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( TAG_CLUSTER . equals ( child . getNodeName ( ) ) ) { processElementCluster ( gen , child ) ; } else if ( TAG_STATIC . equals ( child . getNodeName ( ) ) ) { processElementStatic ( gen , child ) ; } else if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } }
public class ExpressionToken { /** * Update the value of < code > key < / code > in < code > map < / code > * @ param map the map * @ param key the key * @ param value the value */ protected final void mapUpdate ( Map map , Object key , Object value ) { } }
Object o = map . get ( key ) ; /* If a value exists in map . get ( key ) , convert the " value " parameter into the type of map . get ( key ) . It ' s a best guess as to what the type of the Map _ should _ be without any further reflective information about the types contained in the map . */ if ( o != null ) { Class type = o . getClass ( ) ; value = ParseUtils . convertType ( value , type ) ; } map . put ( key , value ) ;
public class MetadataDao { /** * Delete the Metadata , cascading * @ param metadata * metadata * @ return deleted count * @ throws SQLException * upon failure */ public int deleteCascade ( Metadata metadata ) throws SQLException { } }
int count = 0 ; if ( metadata != null ) { // Delete Metadata References and remove parent references MetadataReferenceDao dao = getMetadataReferenceDao ( ) ; dao . deleteByMetadata ( metadata . getId ( ) ) ; dao . removeMetadataParent ( metadata . getId ( ) ) ; // Delete count = delete ( metadata ) ; } return count ;
public class OQL { /** * LogicalBinding * ( b _ 1 AND b _ 2 AND . . . AND b _ n ) * ( b _ 1 OR b _ 2 OR . . . OR b _ n ) */ private void _buildLogicalBinding ( final LogicalBinding binding , final StringBuilder stmt , final List < Object > params ) { } }
final int size = binding . size ( ) ; if ( size < 2 ) { throw new IllegalArgumentException ( "LogicalBinding with less than two element bindings" ) ; } final String logic = ( binding instanceof AndBinding ) ? _AND_ : _OR_ ; stmt . append ( " (" ) ; for ( int i = 0 ; i < size ; i ++ ) { if ( i > 0 ) { stmt . append ( logic ) ; } _buildBinding ( binding . getElementAt ( i ) , stmt , params ) ; } stmt . append ( ")" ) ;
public class Gmp { /** * Calculate ( base ^ exponent ) % modulus ; slower , hardened against timing attacks . * < p > NOTE : this methods REQUIRES modulus to be odd , due to a crash - bug in libgmp . This is not a * problem for RSA where the modulus is always odd . < / p > * @ param base the base , must be positive * @ param exponent the exponent * @ param modulus the modulus * @ return the ( base ^ exponent ) % modulus * @ throws ArithmeticException if modulus is non - positive , or the exponent is negative and the * base cannot be inverted * @ throws IllegalArgumentException if modulus is even */ public static BigInteger modPowSecure ( BigInteger base , BigInteger exponent , BigInteger modulus ) { } }
if ( modulus . signum ( ) <= 0 ) { throw new ArithmeticException ( "modulus must be positive" ) ; } if ( ! modulus . testBit ( 0 ) ) { throw new IllegalArgumentException ( "modulus must be odd" ) ; } return INSTANCE . get ( ) . modPowSecureImpl ( base , exponent , modulus ) ;
public class BasicBinder { /** * Resolve a Marshaller with the given source and target class . * The marshaller is used as follows : Instances of the source can be marshalled into the target class . * @ param source The source ( input ) class * @ param target The target ( output ) class * @ param qualifier The qualifier for which the marshaller must be registered */ public < S , T > ToMarshaller < S , T > findMarshaller ( Class < S > source , Class < T > target , Class < ? extends Annotation > qualifier ) { } }
return findMarshaller ( new ConverterKey < S , T > ( source , target , qualifier == null ? DefaultBinding . class : qualifier ) ) ;
public class FilenameUtil { /** * Checks a filename to see if it matches the specified wildcard filter * allowing control over case - sensitivity . * The wildcard filter uses the characters ' ? ' and ' * ' to represent a * single or multiple ( zero or more ) wildcard characters . * N . B . the sequence " * ? " does not work properly at present in match strings . * @ param filename the filename to match on * @ param wildcardfilter the wildcard string to match against * @ param caseSensitivity what case sensitivity rule to use , null means case - sensitive * @ return true if the filename matches the wilcard string */ public static boolean wildcardMatch ( final String filename , final String wildcardfilter , boolean caseSensitivity ) { } }
return wildcardMatch ( filename , wildcardfilter , caseSensitivity ? IOCase . SENSITIVE : IOCase . INSENSITIVE ) ;
public class WebApp { /** * ( non - Javadoc ) * @ see * com . ibm . websphere . servlet . context . IBMServletContext # loadServlet ( java . * lang . String ) */ public void loadServlet ( String servletName ) throws ServletException , SecurityException { } }
SecurityManager sm = System . getSecurityManager ( ) ; if ( sm != null ) { sm . checkPermission ( perm ) ; } ServletWrapper s ; try { s = ( ServletWrapper ) getServletWrapper ( servletName ) ; if ( s != null ) { s . load ( ) ; } } catch ( Exception e ) { throw new ServletException ( "Servlet load failed: " + e . getMessage ( ) ) ; }
public class MPDConnectionMonitor { /** * Sends the appropriate { @ link org . bff . javampd . server . ConnectionChangeEvent } to all registered * { @ link ConnectionChangeListener } s . * @ param isConnected the connection status */ protected synchronized void fireConnectionChangeEvent ( boolean isConnected ) { } }
ConnectionChangeEvent cce = new ConnectionChangeEvent ( this , isConnected ) ; for ( ConnectionChangeListener ccl : connectionListeners ) { ccl . connectionChangeEventReceived ( cce ) ; }
public class TypeUtils { /** * Get a type representing { @ code type } with variable assignments " unrolled . " * @ param typeArguments as from { @ link TypeUtils # getTypeArguments ( Type , Class ) } * @ param type the type to unroll variable assignments for * @ return Type * @ since 3.2 */ public static Type unrollVariables ( Map < TypeVariable < ? > , Type > typeArguments , final Type type ) { } }
if ( typeArguments == null ) { typeArguments = Collections . emptyMap ( ) ; } if ( containsTypeVariables ( type ) ) { if ( type instanceof TypeVariable < ? > ) { return unrollVariables ( typeArguments , typeArguments . get ( type ) ) ; } if ( type instanceof ParameterizedType ) { final ParameterizedType p = ( ParameterizedType ) type ; final Map < TypeVariable < ? > , Type > parameterizedTypeArguments ; if ( p . getOwnerType ( ) == null ) { parameterizedTypeArguments = typeArguments ; } else { parameterizedTypeArguments = new HashMap < > ( typeArguments ) ; parameterizedTypeArguments . putAll ( TypeUtils . getTypeArguments ( p ) ) ; } final Type [ ] args = p . getActualTypeArguments ( ) ; for ( int i = 0 ; i < args . length ; i ++ ) { final Type unrolled = unrollVariables ( parameterizedTypeArguments , args [ i ] ) ; if ( unrolled != null ) { args [ i ] = unrolled ; } } return parameterizeWithOwner ( p . getOwnerType ( ) , ( Class < ? > ) p . getRawType ( ) , args ) ; } if ( type instanceof WildcardType ) { final WildcardType wild = ( WildcardType ) type ; return wildcardType ( ) . withUpperBounds ( unrollBounds ( typeArguments , wild . getUpperBounds ( ) ) ) . withLowerBounds ( unrollBounds ( typeArguments , wild . getLowerBounds ( ) ) ) . build ( ) ; } } return type ;
public class GwtWebsocketsDemo { /** * This is the entry point method . */ public void onModuleLoad ( ) { } }
final Button sendButton = new Button ( "Send" ) ; final TextBox nameField = new TextBox ( ) ; nameField . setText ( "GWT User" ) ; final Label errorLabel = new Label ( ) ; final Label outputLabel = new Label ( ) ; final Element output = DOM . getElementById ( "output" ) ; final Element status = DOM . getElementById ( "status" ) ; final Console console = new Console ( ) ; console . log ( "adding websocket" ) ; // Establish a websocket communication channel to the atmosphere chat service . // Websocket socket = new Websocket ( " ws : / / localhost : 8080 / chat ? X - Atmosphere - tracking - id = 5ebed4c5-0b90-4166-88b2-9f273719ab75 & X - Atmosphere - Framework = 2.2.1 - jquery & X - Atmosphere - Transport = websocket & Content - Type = application / json & X - atmo - protocol = true " ) ; final String url = "ws://localhost:8080/stream" ; Websocket socket = new Websocket ( url ) ; socket . addListener ( new WebsocketListener ( ) { @ Override public void onClose ( ) { // do something on close } @ Override public void onMessage ( String msg ) { // a message is received console . log ( "onMessage(): " + msg ) ; outputLabel . setText ( "websocket " + url + ": " + msg ) ; output . setInnerText ( msg ) ; } @ Override public void onOpen ( ) { // do something on open console . log ( "onOpen()" ) ; status . setInnerText ( "connected: " + url ) ; } } ) ; socket . open ( ) ; console . log ( "websocket is open" ) ;
public class JschUtil { /** * 解除端口映射 * @ param session 需要解除端口映射的SSH会话 * @ param localPort 需要解除的本地端口 * @ return 解除成功与否 */ public static boolean unBindPort ( Session session , int localPort ) { } }
try { session . delPortForwardingL ( localPort ) ; return true ; } catch ( JSchException e ) { throw new JschRuntimeException ( e ) ; }
public class CalibrationDetectorChessboard2 { /** * This target is composed of a checkered chess board like squares . Each corner of an interior square * touches an adjacent square , but the sides are separated . Only interior square corners provide * calibration points . * @ param numRows Number of grid rows in the calibration target * @ param numCols Number of grid columns in the calibration target * @ param squareWidth How wide each square is . Units are target dependent . * @ return Target description */ public static List < Point2D_F64 > gridChess ( int numRows , int numCols , double squareWidth ) { } }
List < Point2D_F64 > all = new ArrayList < > ( ) ; // convert it into the number of calibration points numCols = numCols - 1 ; numRows = numRows - 1 ; // center the grid around the origin . length of a size divided by two double startX = - ( ( numCols - 1 ) * squareWidth ) / 2.0 ; double startY = - ( ( numRows - 1 ) * squareWidth ) / 2.0 ; for ( int i = numRows - 1 ; i >= 0 ; i -- ) { double y = startY + i * squareWidth ; for ( int j = 0 ; j < numCols ; j ++ ) { double x = startX + j * squareWidth ; all . add ( new Point2D_F64 ( x , y ) ) ; } } return all ;
public class MitigationMarshaller {
    /**
     * Marshall the given {@code Mitigation} instance via the protocol marshaller.
     *
     * @param mitigation         the object to marshall; must not be null
     * @param protocolMarshaller target protocol marshaller
     * @throws SdkClientException when the argument is null or marshalling fails
     */
    public void marshall ( Mitigation mitigation , ProtocolMarshaller protocolMarshaller ) { } }
// Generated AWS SDK marshaller: validate the argument, then emit each field binding;
// any failure is wrapped in an SdkClientException with the original as cause.
if ( mitigation == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( mitigation . getMitigationName ( ) , MITIGATIONNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class StringUtils {
    /**
     * Splits a String on word boundaries, yielding tokens that are all
     * "single words" (see {@link #isSingleWord(String)}) or delimiters (if
     * includeDelims is set to true).
     *
     * NOTE(review): includeDelims is never consulted below — Pattern.split
     * drops the delimiters unconditionally. Confirm whether delimiter
     * inclusion was ever implemented or the javadoc overpromises.
     *
     * @param value         the String to split; null yields an empty list
     * @param includeDelims whether to include the delimiters in the returned list
     * @return a list containing words (and, per the contract, delimiters)
     */
    public static List < String > splitOnWordBoundaries ( final String value , final boolean includeDelims ) { } }
// Null-safe: split on the precompiled boundary pattern.
if ( value == null ) { return Collections . emptyList ( ) ; } return Arrays . asList ( WORD_BOUNDARY_PATTERN . split ( value ) ) ;
public class CorePlugin { /** * Cycles through all provided Elasticsearch URLs and returns one * @ return One of the provided Elasticsearch URLs */ public URL getElasticsearchUrl ( ) { } }
final List < URL > urls = elasticsearchUrls . getValue ( ) ; if ( urls . isEmpty ( ) ) { return null ; } final int index = accessesToElasticsearchUrl . getAndIncrement ( ) % urls . size ( ) ; URL elasticsearchURL = urls . get ( index ) ; final String defaultUsernameValue = elasticsearchDefaultUsername . getValue ( ) ; final String defaultPasswordValue = elasticsearchDefaultPassword . getValue ( ) ; if ( elasticsearchURL . getUserInfo ( ) == null && ! defaultUsernameValue . isEmpty ( ) && ! defaultPasswordValue . isEmpty ( ) ) { try { String username = URLEncoder . encode ( defaultUsernameValue , "UTF-8" ) ; String password = URLEncoder . encode ( defaultPasswordValue , "UTF-8" ) ; StringBuilder stringBuilder = new StringBuilder ( ) ; stringBuilder . append ( elasticsearchURL . getProtocol ( ) ) . append ( "://" ) . append ( username ) . append ( ":" ) . append ( password ) . append ( "@" ) . append ( elasticsearchURL . getHost ( ) ) . append ( ":" ) . append ( elasticsearchURL . getPort ( ) ) . append ( elasticsearchURL . getPath ( ) ) ; return new URL ( stringBuilder . toString ( ) ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } } return elasticsearchURL ;
public class GeneratedDContactDaoImpl { /** * query - by method for field lastName * @ param lastName the specified attribute * @ return an Iterable of DContacts for the specified lastName */ public Iterable < DContact > queryByLastName ( Object parent , java . lang . String lastName ) { } }
return queryByField ( parent , DContactMapper . Field . LASTNAME . getFieldName ( ) , lastName ) ;
public class JMessageClient {
    /**
     * Add members to a chat room.
     *
     * @param roomId  chat room id
     * @param members usernames to add
     * @return response wrapper (no content on success)
     * @throws APIConnectionException on connection failure
     * @throws APIRequestException    on request failure
     */
    public ResponseWrapper addChatRoomMember ( long roomId , String ... members ) throws APIConnectionException , APIRequestException { } }
// Thin delegation: the chat-room sub-client performs the actual API call.
return _chatRoomClient . addChatRoomMember ( roomId , members ) ;
public class SwingUtil { /** * Adds a one pixel border of random color to this and all panels contained in this panel ' s * child hierarchy . */ public static void addDebugBorders ( JPanel panel ) { } }
Color bcolor = new Color ( _rando . nextInt ( 256 ) , _rando . nextInt ( 256 ) , _rando . nextInt ( 256 ) ) ; panel . setBorder ( BorderFactory . createLineBorder ( bcolor ) ) ; for ( int ii = 0 ; ii < panel . getComponentCount ( ) ; ii ++ ) { Object child = panel . getComponent ( ii ) ; if ( child instanceof JPanel ) { addDebugBorders ( ( JPanel ) child ) ; } }
public class Evaluator { /** * Gets the . * @ param < T > the generic type * @ param _ idx the idx * @ return the t * @ throws EFapsException on error */ @ SuppressWarnings ( "unchecked" ) public < T > T get ( final int _idx ) throws EFapsException { } }
initialize ( true ) ; Object ret = null ; final int idx = _idx - 1 ; if ( this . selection . getSelects ( ) . size ( ) > idx ) { final Select select = this . selection . getSelects ( ) . get ( idx ) ; ret = get ( select ) ; } return ( T ) ret ;
public class CmsSystemConfiguration { /** * VFS version history settings are set here . < p > * @ param historyEnabled if true the history is enabled * @ param historyVersions the maximum number of versions that are kept per VFS resource * @ param historyVersionsAfterDeletion the maximum number of versions for deleted resources */ public void setHistorySettings ( String historyEnabled , String historyVersions , String historyVersionsAfterDeletion ) { } }
m_historyEnabled = Boolean . valueOf ( historyEnabled ) . booleanValue ( ) ; m_historyVersions = Integer . valueOf ( historyVersions ) . intValue ( ) ; m_historyVersionsAfterDeletion = Integer . valueOf ( historyVersionsAfterDeletion ) . intValue ( ) ; if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_HISTORY_SETTINGS_3 , Boolean . valueOf ( m_historyEnabled ) , new Integer ( m_historyVersions ) , new Integer ( m_historyVersionsAfterDeletion ) ) ) ; }
public class LongStreamEx {
    /**
     * Returns a sequential ordered {@code LongStreamEx} whose elements are the
     * values in the supplied {@link java.nio.LongBuffer}.
     *
     * The stream covers only the portion of the buffer from
     * {@linkplain Buffer#position() position} (inclusive) to
     * {@linkplain Buffer#limit() limit} (exclusive); later changes to
     * position/limit do not affect the stream, and the stream does not
     * change the buffer's internal state.
     *
     * @param buf the {@code LongBuffer} to create a stream from
     * @return the new stream
     * @since 0.6.2
     */
    public static LongStreamEx of ( java . nio . LongBuffer buf ) { } }
// Absolute get(i) reads leave the buffer's position untouched.
return IntStreamEx . range ( buf . position ( ) , buf . limit ( ) ) . mapToLong ( buf :: get ) ;
public class ReflectionUtils {
    /**
     * Searches the classpath for all public concrete subtypes of the given
     * interface or abstract class.
     *
     * @param type the interface or abstract class to search subtypes of
     * @param <T>  the actual type to introspect
     * @return a list of all public concrete subtypes found
     */
    public static < T > List < Class < ? > > getPublicConcreteSubTypesOf ( final Class < T > type ) { } }
// Classpath scanning is delegated entirely to the ClassGraph facade.
return ClassGraphFacade . getPublicConcreteSubTypesOf ( type ) ;
public class Header {
    /**
     * Getter for copyright — gets Copyright information.
     * UIMA-generated accessor; do not edit by hand.
     *
     * @generated
     * @return value of the feature
     */
    public String getCopyright ( ) { } }
// Generated UIMA feature access: verify the feature exists, then read the
// string value from the low-level CAS.
if ( Header_Type . featOkTst && ( ( Header_Type ) jcasType ) . casFeat_copyright == null ) jcasType . jcas . throwFeatMissing ( "copyright" , "de.julielab.jules.types.Header" ) ; return jcasType . ll_cas . ll_getStringValue ( addr , ( ( Header_Type ) jcasType ) . casFeatCode_copyright ) ;
public class CardLoadSupport { /** * start load data for a card , usually called by { @ link TangramEngine } * @ param card the card need async loading data */ public void doLoad ( final Card card ) { } }
if ( mAsyncLoader == null ) { return ; } if ( ! card . loading && ! card . loaded ) { card . loading = true ; mAsyncLoader . loadData ( card , new AsyncLoader . LoadedCallback ( ) { @ Override public void finish ( ) { card . loading = false ; card . loaded = true ; } @ Override public void finish ( List < BaseCell > cells ) { finish ( ) ; card . addCells ( cells ) ; card . notifyDataChange ( ) ; } public void fail ( boolean loaded ) { card . loading = false ; card . loaded = loaded ; } } ) ; }
public class ExqlPatternImpl {
    /**
     * Execute the translation: render the statement unit into the context and,
     * when debug logging is enabled, log origin, result and parameters.
     *
     * @param exprResolver resolver for expressions referenced by the pattern
     */
    protected void execute(ExqlContext context, ExprResolver exprResolver) throws Exception {
        // Render the statement contents into the context.
        unit.fill(context, exprResolver);
        if (logger.isDebugEnabled()) {
            logger.debug("EXQL pattern executing:\n origin: " + pattern
                    + "\n result: " + context.flushOut()
                    + "\n params: " + Arrays.toString(context.getArgs()));
        }
    }
}
public class JSParser { /** * Primitive : : = . . . all possible XML Schema built - in types */ final public JSPrimitive Primitive ( ) throws ParseException { } }
Token t ; JSPrimitive ans = new JSPrimitive ( ) ; switch ( ( jj_ntk == - 1 ) ? jj_ntk ( ) : jj_ntk ) { case 13 : t = jj_consume_token ( 13 ) ; break ; case 14 : t = jj_consume_token ( 14 ) ; break ; case 15 : t = jj_consume_token ( 15 ) ; break ; case 16 : t = jj_consume_token ( 16 ) ; break ; case 17 : t = jj_consume_token ( 17 ) ; break ; case 18 : t = jj_consume_token ( 18 ) ; break ; case 19 : t = jj_consume_token ( 19 ) ; break ; case 20 : t = jj_consume_token ( 20 ) ; break ; case 21 : t = jj_consume_token ( 21 ) ; break ; case 22 : t = jj_consume_token ( 22 ) ; break ; case 23 : t = jj_consume_token ( 23 ) ; break ; case 24 : t = jj_consume_token ( 24 ) ; break ; case 25 : t = jj_consume_token ( 25 ) ; break ; case 26 : t = jj_consume_token ( 26 ) ; break ; case 27 : t = jj_consume_token ( 27 ) ; break ; case 28 : t = jj_consume_token ( 28 ) ; break ; case 29 : t = jj_consume_token ( 29 ) ; break ; case 30 : t = jj_consume_token ( 30 ) ; break ; case 31 : t = jj_consume_token ( 31 ) ; break ; case 32 : t = jj_consume_token ( 32 ) ; break ; case 33 : t = jj_consume_token ( 33 ) ; break ; case 34 : t = jj_consume_token ( 34 ) ; break ; case 35 : t = jj_consume_token ( 35 ) ; break ; case 36 : t = jj_consume_token ( 36 ) ; break ; case 37 : t = jj_consume_token ( 37 ) ; break ; case 38 : t = jj_consume_token ( 38 ) ; break ; case 39 : t = jj_consume_token ( 39 ) ; break ; case 40 : t = jj_consume_token ( 40 ) ; break ; case 41 : t = jj_consume_token ( 41 ) ; break ; case 42 : t = jj_consume_token ( 42 ) ; break ; case 43 : t = jj_consume_token ( 43 ) ; break ; case 44 : t = jj_consume_token ( 44 ) ; break ; case 45 : t = jj_consume_token ( 45 ) ; break ; case 46 : t = jj_consume_token ( 46 ) ; break ; case 47 : t = jj_consume_token ( 47 ) ; break ; case 48 : t = jj_consume_token ( 48 ) ; break ; case 49 : t = jj_consume_token ( 49 ) ; break ; case 50 : t = jj_consume_token ( 50 ) ; break ; case 51 : t = jj_consume_token ( 51 ) ; break ; case 52 : t = 
jj_consume_token ( 52 ) ; break ; case 53 : t = jj_consume_token ( 53 ) ; break ; case 54 : t = jj_consume_token ( 54 ) ; break ; case 55 : t = jj_consume_token ( 55 ) ; break ; case 56 : t = jj_consume_token ( 56 ) ; break ; case 57 : t = jj_consume_token ( 57 ) ; break ; case 58 : t = jj_consume_token ( 58 ) ; break ; case 59 : t = jj_consume_token ( 59 ) ; break ; default : jj_la1 [ 6 ] = jj_gen ; jj_consume_token ( - 1 ) ; throw new ParseException ( ) ; } ans . setXSDTypeName ( t . image ) ; { if ( true ) return ans ; } throw new Error ( "Missing return statement in function" ) ;
public class SegmentIntegration { /** * Create a { @ link QueueFile } in the given folder with the given name . If the underlying file is * somehow corrupted , we ' ll delete it , and try to recreate the file . This method will throw an * { @ link IOException } if the directory doesn ' t exist and could not be created . */ static QueueFile createQueueFile ( File folder , String name ) throws IOException { } }
createDirectory ( folder ) ; File file = new File ( folder , name ) ; try { return new QueueFile ( file ) ; } catch ( IOException e ) { // noinspection ResultOfMethodCallIgnored if ( file . delete ( ) ) { return new QueueFile ( file ) ; } else { throw new IOException ( "Could not create queue file (" + name + ") in " + folder + "." ) ; } }
public class RequestFromVertx { /** * Gets the ' raw ' body . * @ return the raw body , { @ code null } if there is no body . */ public String getRawBodyAsString ( ) { } }
if ( raw == null ) { return null ; } return raw . toString ( Charsets . UTF_8 . displayName ( ) ) ;
public class ConverterConfiguration { /** * Resolves and returns name of the type given to provided class . * @ param clazz { @ link Class } to resolve type name for * @ return type name or < code > null < / code > if type was not registered */ public String getTypeName ( Class < ? > clazz ) { } }
Type type = typeAnnotations . get ( clazz ) ; if ( type != null ) { return type . value ( ) ; } return null ;
public class DefaultPageErrorHandler { /** * The interception method . When the request is unbound , generate a 404 page . When the controller throws an * exception generates a 500 page . * @ param route the route * @ param context the filter context * @ return the generated result . * @ throws Exception if anything bad happen */ @ Override public Result call ( Route route , RequestContext context ) throws Exception { } }
// Manage the error file . // In dev mode , if the watching pipeline throws an error , this error is stored in the error . json file // If this file exist , we should display a page telling the user that something terrible happened in his last // change . if ( configuration . isDev ( ) && context . request ( ) . accepts ( MimeTypes . HTML ) && pipeline != null ) { // Check whether the error file is there File error = getFirstErrorFile ( ) ; if ( error != null ) { logger ( ) . debug ( "Error file detected, preparing rendering" ) ; try { return renderPipelineError ( error ) ; } catch ( IOException e ) { LOGGER . error ( "An exception occurred while generating the error page for {} {}" , route . getHttpMethod ( ) , route . getUrl ( ) , e ) ; return renderInternalError ( context . context ( ) , route , e ) ; } } } try { Result result = context . proceed ( ) ; if ( result . getStatusCode ( ) == NOT_FOUND && result . getRenderable ( ) instanceof NoHttpBody ) { // HEAD Implementation . if ( route . getHttpMethod ( ) == HttpMethod . HEAD ) { return switchToGet ( route , context ) ; } return renderNotFound ( route , result ) ; } return result ; } catch ( InvocationTargetException e ) { Throwable cause = e . getCause ( ) ; LOGGER . error ( "An exception occurred while processing request {} {}" , route . getHttpMethod ( ) , route . getUrl ( ) , cause ) ; // if it is and the cause is a HTTP Exception , return that one if ( cause instanceof HttpException ) { // If we catch a HTTP Exception , just return the built result . LOGGER . error ( "A HTTP exception occurred while processing request {} {}" , route . getHttpMethod ( ) , route . getUrl ( ) , e ) ; return ( ( HttpException ) cause ) . toResult ( ) ; } // if we have a mapper for that exception , use it . for ( ExceptionMapper mapper : mappers ) { if ( mapper . getExceptionClass ( ) . equals ( cause . 
getClass ( ) ) ) { // We can safely cast here , as we have the previous class check ; // noinspection unchecked return mapper . toResult ( ( Exception ) cause ) ; } } return renderInternalError ( context . context ( ) , route , e ) ; } catch ( Exception e ) { LOGGER . error ( "An exception occurred while processing request {} {}" , route . getHttpMethod ( ) , route . getUrl ( ) , e ) ; Throwable cause = e . getCause ( ) ; // if we have a mapper for that exception , use it . for ( ExceptionMapper mapper : mappers ) { if ( mapper . getExceptionClass ( ) . equals ( cause . getClass ( ) ) ) { // We can safely cast here , as we have the previous class check ; // noinspection unchecked return mapper . toResult ( ( Exception ) cause ) ; } } // Used when it ' s not an invocation target exception , or when it is one but we don ' t have custom action // to handle it . return renderInternalError ( context . context ( ) , route , e ) ; }
public class VirtualNetworkTapsInner {
    /**
     * Creates or updates a Virtual Network Tap.
     *
     * @param resourceGroupName The name of the resource group.
     * @param tapName           The name of the virtual network tap.
     * @param parameters        Parameters for the create-or-update operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable < VirtualNetworkTapInner > createOrUpdateAsync ( String resourceGroupName , String tapName , VirtualNetworkTapInner parameters ) { } }
// Generated Azure SDK wrapper: unwrap the ServiceResponse envelope, exposing
// only the body to the caller.
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , tapName , parameters ) . map ( new Func1 < ServiceResponse < VirtualNetworkTapInner > , VirtualNetworkTapInner > ( ) { @ Override public VirtualNetworkTapInner call ( ServiceResponse < VirtualNetworkTapInner > response ) { return response . body ( ) ; } } ) ;
public class HtmlAdaptorServlet { /** * Display all MBeans * @ param request The HTTP request * @ param response The HTTP response * @ exception ServletException Thrown if an error occurs * @ exception IOException Thrown if an I / O error occurs */ private void displayMBeans ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { } }
Iterator mbeans ; try { mbeans = getDomainData ( ) ; } catch ( Exception e ) { throw new ServletException ( "Failed to get MBeans" , e ) ; } request . setAttribute ( "mbeans" , mbeans ) ; RequestDispatcher rd = this . getServletContext ( ) . getRequestDispatcher ( "/displaymbeans.jsp" ) ; rd . forward ( request , response ) ;
public class LongStream {
    /**
     * Returns a {@code DoubleStream} consisting of the results of applying the
     * given function to the elements of this stream.
     *
     * <p>This is an intermediate operation.</p>
     *
     * @param mapper the function applied to each element
     * @return the new {@code DoubleStream}
     */
    @ NotNull public DoubleStream mapToDouble ( @ NotNull final LongToDoubleFunction mapper ) { } }
// Lazy: wraps the current iterator; mapping happens on consumption.
return new DoubleStream ( params , new LongMapToDouble ( iterator , mapper ) ) ;
public class UIValidateForm { /** * Getters & Setters */ public String getFields ( ) { } }
StateHelper helper = this . getStateHelper ( true ) ; return ( String ) helper . get ( FIELDS_KEY ) ;
public class SwiftDocEscaping {
    /**
     * Return a deprecated attribute, as a string of Swift source.
     *
     * @param deprecatedProp the 'deprecated' Pegasus schema property, if any;
     *                       may be a Boolean, a String message, or null
     * @return "" when absent, an @available attribute carrying the message for
     *         a String, or a bare @available attribute otherwise
     */
    public static String deprecatedToString(Object deprecatedProp) {
        if (deprecatedProp == null) {
            return "";
        }
        if (deprecatedProp instanceof String) {
            // A String property carries a human-readable deprecation message.
            return "@available(*, deprecated, message=\"" + deprecatedProp + "\")";
        }
        return "@available(*, deprecated)";
    }
}
public class ProcessorLRE { /** * Receive notification of the end of an element . * @ param handler non - null reference to current StylesheetHandler that is constructing the Templates . * @ param uri The Namespace URI , or an empty string . * @ param localName The local name ( without prefix ) , or empty string if not namespace processing . * @ param rawName The qualified name ( with prefix ) . */ public void endElement ( StylesheetHandler handler , String uri , String localName , String rawName ) throws org . xml . sax . SAXException { } }
ElemTemplateElement elem = handler . getElemTemplateElement ( ) ; if ( elem instanceof ElemLiteralResult ) { if ( ( ( ElemLiteralResult ) elem ) . getIsLiteralResultAsStylesheet ( ) ) { handler . popStylesheet ( ) ; } } super . endElement ( handler , uri , localName , rawName ) ;
public class ConfirmPublicVirtualInterfaceRequestMarshaller {
    /**
     * Marshall the given request object via the protocol marshaller.
     *
     * @param confirmPublicVirtualInterfaceRequest the request to marshall; must not be null
     * @param protocolMarshaller                   target protocol marshaller
     * @throws SdkClientException when the argument is null or marshalling fails
     */
    public void marshall ( ConfirmPublicVirtualInterfaceRequest confirmPublicVirtualInterfaceRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Generated AWS SDK marshaller: validate the argument, then emit each field binding;
// any failure is wrapped in an SdkClientException with the original as cause.
if ( confirmPublicVirtualInterfaceRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( confirmPublicVirtualInterfaceRequest . getVirtualInterfaceId ( ) , VIRTUALINTERFACEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class WarpGroupImpl { /** * ( non - Javadoc ) * @ see org . jboss . arquillian . warp . impl . client . execution . WarpGroup # pushResponsePayload ( org . jboss . arquillian . warp . impl . shared . ResponsePayload ) */ public boolean pushResponsePayload ( ResponsePayload responsePayload ) { } }
if ( payloads . containsKey ( responsePayload . getSerialId ( ) ) ) { payloads . put ( responsePayload . getSerialId ( ) , responsePayload ) ; return true ; } return false ;
public class VolumeStatusInfo { /** * The details of the volume status . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDetails ( java . util . Collection ) } or { @ link # withDetails ( java . util . Collection ) } if you want to override * the existing values . * @ param details * The details of the volume status . * @ return Returns a reference to this object so that method calls can be chained together . */ public VolumeStatusInfo withDetails ( VolumeStatusDetails ... details ) { } }
if ( this . details == null ) { setDetails ( new com . amazonaws . internal . SdkInternalList < VolumeStatusDetails > ( details . length ) ) ; } for ( VolumeStatusDetails ele : details ) { this . details . add ( ele ) ; } return this ;
public class LWJGL3TypeConversions { /** * Convert faces from GL constants . * @ param face The GL constant . * @ return The value . */ public static JCGLCubeMapFaceLH cubeFaceFromGL ( final int face ) { } }
switch ( face ) { case GL13 . GL_TEXTURE_CUBE_MAP_NEGATIVE_X : return JCGLCubeMapFaceLH . CUBE_MAP_LH_NEGATIVE_X ; case GL13 . GL_TEXTURE_CUBE_MAP_POSITIVE_X : return JCGLCubeMapFaceLH . CUBE_MAP_LH_POSITIVE_X ; case GL13 . GL_TEXTURE_CUBE_MAP_POSITIVE_Y : return JCGLCubeMapFaceLH . CUBE_MAP_LH_POSITIVE_Y ; case GL13 . GL_TEXTURE_CUBE_MAP_NEGATIVE_Y : return JCGLCubeMapFaceLH . CUBE_MAP_LH_NEGATIVE_Y ; case GL13 . GL_TEXTURE_CUBE_MAP_NEGATIVE_Z : return JCGLCubeMapFaceLH . CUBE_MAP_LH_NEGATIVE_Z ; case GL13 . GL_TEXTURE_CUBE_MAP_POSITIVE_Z : return JCGLCubeMapFaceLH . CUBE_MAP_LH_POSITIVE_Z ; default : throw new UnreachableCodeException ( ) ; }
public class DbUtil {
    /**
     * Set the global configuration: whether SQL statements are shown via
     * debug logging.
     *
     * @param isShowSql    whether to show SQL at all
     * @param isFormatSql  whether to pretty-format the shown SQL
     * @param isShowParams whether to also print the bind parameters
     * @param level        the log level the SQL is printed at
     * @since 4.1.7
     */
    public static void setShowSqlGlobal ( boolean isShowSql , boolean isFormatSql , boolean isShowParams , Level level ) { } }
// Delegates to the shared SqlLog singleton (field name typo is external API).
SqlLog . INSTASNCE . init ( isShowSql , isFormatSql , isShowParams , level ) ;
public class RowAVLDisk { /** * Sets the file position for the row * @ param pos position in data file */ public void setPos ( int pos ) { } }
position = pos ; NodeAVL n = nPrimaryNode ; while ( n != null ) { ( ( NodeAVLDisk ) n ) . iData = position ; n = n . nNext ; }
public class Param { /** * Services the page fragment . This version simply prints the value of * this parameter to teh PageContext ' s out . */ public void service ( PageContext pContext ) throws ServletException , IOException { } }
JspWriter writer = pContext . getOut ( ) ; writer . print ( value ) ;
public class SoitoolkitLoggerModule { /** * Log processor for level ERROR * { @ sample . xml . . / . . / . . / doc / SoitoolkitLogger - connector . xml . sample soitoolkitlogger : log } * @ param message Log - message to be processed * @ param integrationScenario Optional name of the integration scenario or business process * @ param contractId Optional name of the contract in use * @ param correlationId Optional correlation identity of the message * @ param extra Optional extra info * @ return The incoming payload */ @ Processor public Object logError ( String message , @ Optional String integrationScenario , @ Optional String contractId , @ Optional String correlationId , @ Optional Map < String , String > extra ) { } }
// Delegate to the shared doLog helper with the level pinned to ERROR;
// all other log levels presumably have sibling wrappers of the same shape.
return doLog ( LogLevelType . ERROR , message , integrationScenario , contractId , correlationId , extra ) ;
public class PngOptimizerTask { private void convert ( ) { } }
long start = System . currentTimeMillis ( ) ; PngOptimizer optimizer = new PngOptimizer ( logLevel ) ; optimizer . setCompressor ( compressor , iterations ) ; optimizer . setGenerateDataUriCss ( generateDataUriCss ) ; for ( FileSet fileset : filesets ) { DirectoryScanner ds = fileset . getDirectoryScanner ( getProject ( ) ) ; for ( String src : ds . getIncludedFiles ( ) ) { String inputPath = fileset . getDir ( ) + "/" + src ; String outputPath ; try { String outputDir = ( toDir == null ) ? fileset . getDir ( ) . getCanonicalPath ( ) : toDir ; outputPath = outputDir + "/" + src ; // make the directory this file is in ( for nested dirs in a * * / * fileset ) makeDirs ( outputPath . substring ( 0 , outputPath . lastIndexOf ( '/' ) ) ) ; PngImage image = new PngImage ( inputPath , logLevel ) ; optimizer . optimize ( image , outputPath + fileSuffix , removeGamma , compressionLevel ) ; } catch ( Exception e ) { log ( String . format ( "Problem optimizing %s. Caught %s" , inputPath , e . getMessage ( ) ) ) ; } } } log ( String . format ( "Processed %d files in %d milliseconds, saving %d bytes" , optimizer . getResults ( ) . size ( ) , System . currentTimeMillis ( ) - start , optimizer . getTotalSavings ( ) ) ) ; if ( generateDataUriCss ) { try { optimizer . generateDataUriCss ( toDir ) ; } catch ( IOException e ) { } }
public class Transfer { /** * Run with - - help arg for syntax help . * @ throws IllegalArgumentException for the obvious reason */ public static void main ( String [ ] arg ) { } }
// Disable DirectDraw before any AWT classes load (set as a system property).
// NOTE(review): bMustExit and work(...) are declared elsewhere in this class;
// work(arg) presumably performs the actual transfer — confirm in the full source.
// A bad argument list is rethrown with a hint to run with --help.
System . getProperties ( ) . put ( "sun.java2d.noddraw" , "true" ) ; bMustExit = true ; try { work ( arg ) ; } catch ( IllegalArgumentException iae ) { throw new IllegalArgumentException ( "Try: java " + Transfer . class . getName ( ) + " --help" ) ; }
public class ClosestPointPathShadow2ai { /** * Determine where the segment is crossing the two shadow lines . * @ param shadowX0 x coordinate of the reference point of the first shadow line . * @ param shadowY0 y coordinate of the reference point of the first shadow line . * @ param shadowX1 x coordinate of the reference point of the second shadow line . * @ param shadowY1 y coordinate of the reference point of the second shadow line . * @ param sx0 x coordinate of the first point of the segment . * @ param sy0 y coordinate of the first point of the segment . * @ param sx1 x coordinate of the second point of the segment . * @ param sy1 y coordinate of the second point of the segment . */ @ SuppressWarnings ( {
// Overview: early-exits when this shadow edge does not touch the global bounding
// rows (boundingMinY/boundingMaxY) or when the segment lies entirely below,
// above, or left of the shadow's bounding box. When the segment is entirely to
// the right, crossings with the min/max shadow rows are counted and the crossing
// x-intercepts recorded via setCrossingCoordinateForYMin/Max. A direct
// segment/segment intersection short-circuits to SHAPE_INTERSECTS. Otherwise the
// segment endpoints' sides relative to the (y-ordered) shadow line decide whether
// the two shadow rows are tested individually via crossSegmentShadowLine.
// NOTE(review): in the right-of-shadow horizontal-shadow branch, `this.crossings`
// is assigned the new count and then restored to the old `cross` inside the
// changed-branch — looks intentional (only the intercepts are recorded) but
// verify against the library's crossing-count contract.
"checkstyle:parameternumber" , "checkstyle:cyclomaticcomplexity" , "checkstyle:npathcomplexity" } ) private void crossSegmentTwoShadowLines ( int shadowX0 , int shadowY0 , int shadowX1 , int shadowY1 , int sx0 , int sy0 , int sx1 , int sy1 ) { // Update the global bounds of the shadow . final int shadowYmin = Math . min ( shadowY0 , shadowY1 ) ; final int shadowYmax = Math . max ( shadowY0 , shadowY1 ) ; if ( shadowYmin != this . boundingMinY && shadowYmax != this . boundingMaxY ) { // Shadow is not contributing to the crossing computation . return ; } if ( sy0 < shadowYmin && sy1 < shadowYmin ) { // The segment is entirely at the bottom of the shadow . return ; } if ( sy0 > shadowYmax && sy1 > shadowYmax ) { // The segment is entirely at the top of the shadow . return ; } final int shadowXmin = Math . min ( shadowX0 , shadowX1 ) ; final int shadowXmax = Math . max ( shadowX0 , shadowX1 ) ; if ( sx0 < shadowXmin && sx1 < shadowXmin ) { // The segment is entirely at the left of the shadow . return ; } if ( sx0 > shadowXmax && sx1 > shadowXmax ) { // The line is entirely at the right of the shadow final OutputParameter < Integer > param = new OutputParameter < > ( ) ; if ( shadowYmin == shadowYmax ) { final int cross = this . crossings ; this . crossings = Segment2ai . calculatesCrossingsAndXPointShadowSegment ( cross , shadowXmax , shadowYmin , sx0 , sy0 , sx1 , sy1 , shadowYmin == this . boundingMinY , shadowYmax == this . boundingMaxY , param ) ; if ( cross != this . crossings ) { final int xintercept = param . get ( ) . intValue ( ) ; setCrossingCoordinateForYMax ( xintercept , shadowYmin ) ; setCrossingCoordinateForYMin ( xintercept , shadowYmin ) ; this . crossings = cross ; } } else { if ( shadowYmin == this . boundingMinY ) { final int cross = Segment2ai . calculatesCrossingsAndXPointShadowSegment ( this . crossings , shadowXmax , shadowYmin , sx0 , sy0 , sx1 , sy1 , shadowYmin == this . boundingMinY , false , param ) ; if ( cross != this . 
crossings ) { final int xintercept = param . get ( ) . intValue ( ) ; setCrossingCoordinateForYMax ( xintercept , shadowYmin ) ; setCrossingCoordinateForYMin ( xintercept , shadowYmin ) ; this . crossings = cross ; } } if ( shadowYmax == this . boundingMaxY ) { final int cross = Segment2ai . calculatesCrossingsAndXPointShadowSegment ( this . crossings , shadowXmax , shadowYmax , sx0 , sy0 , sx1 , sy1 , false , shadowYmax == this . boundingMaxY , param ) ; if ( cross != this . crossings ) { final int xintercept = param . get ( ) . intValue ( ) ; setCrossingCoordinateForYMax ( xintercept , shadowYmax ) ; setCrossingCoordinateForYMin ( xintercept , shadowYmax ) ; this . crossings = cross ; } } } } else if ( Segment2ai . intersectsSegmentSegment ( shadowX0 , shadowY0 , shadowX1 , shadowY1 , sx0 , sy0 , sx1 , sy1 ) ) { // The segment is intersecting the shadowed segment . this . crossings = GeomConstants . SHAPE_INTERSECTS ; } else { final int side1 ; final int side2 ; if ( shadowY0 <= shadowY1 ) { side1 = Segment2ai . findsSideLinePoint ( shadowX0 , shadowY0 , shadowX1 , shadowY1 , sx0 , sy0 ) ; side2 = Segment2ai . findsSideLinePoint ( shadowX0 , shadowY0 , shadowX1 , shadowY1 , sx1 , sy1 ) ; } else { side1 = Segment2ai . findsSideLinePoint ( shadowX1 , shadowY1 , shadowX0 , shadowY0 , sx0 , sy0 ) ; side2 = Segment2ai . findsSideLinePoint ( shadowX1 , shadowY1 , shadowX0 , shadowY0 , sx1 , sy1 ) ; } if ( side1 >= 0 || side2 >= 0 ) { final int x0 ; final int x1 ; if ( shadowYmin == shadowY0 ) { x0 = shadowX0 ; x1 = shadowX1 ; } else { x0 = shadowX1 ; x1 = shadowX0 ; } crossSegmentShadowLine ( x0 , shadowYmin , sx0 , sy0 , sx1 , sy1 , true , false ) ; crossSegmentShadowLine ( x1 , shadowYmax , sx0 , sy0 , sx1 , sy1 , false , true ) ; } }
public class CollectionNamingConfusion { /** * overrides the visitor to look for local variables where the name has ' Map ' , ' Set ' , ' List ' in it but the type of that field isn ' t that . note that this * only is useful if compiled with debug labels . * @ param obj * the currently parsed method */ @ Override public void visitMethod ( Method obj ) { } }
LocalVariableTable lvt = obj . getLocalVariableTable ( ) ; if ( lvt != null ) { LocalVariable [ ] lvs = lvt . getLocalVariableTable ( ) ; for ( LocalVariable lv : lvs ) { if ( checkConfusedName ( lv . getName ( ) , lv . getSignature ( ) ) ) { bugReporter . reportBug ( new BugInstance ( this , BugType . CNC_COLLECTION_NAMING_CONFUSION . name ( ) , NORMAL_PRIORITY ) . addClass ( this ) . addString ( lv . getName ( ) ) . addSourceLine ( this . clsContext , this , lv . getStartPC ( ) ) ) ; } } }
public class JvmTypesBuilder { /** * Adds or removes the annotation { @ link Extension @ Extension } from the given parameter . If the annotation is * already present , nothing is done if { @ code value } is { @ code true } . If it is not present and { @ code value } * is { @ code false } , this is a no - op , too . * @ param parameter the parameter that will be processed * @ param sourceElement the context that shall be used to lookup the { @ link Extension annotation type } . * @ param value < code > true < / code > if the parameter shall be marked as extension , < code > false < / code > if it should be unmarked . */ public void setExtension ( /* @ Nullable */ JvmFormalParameter parameter , EObject sourceElement , boolean value ) { } }
if ( parameter == null ) return ; internalSetExtension ( parameter , sourceElement , value ) ;
public class JsonConfig { /** * Removes a JsonValueProcessor . < br > * [ Java - & gt ; JSON ] * @ param beanClass the class to which the property may belong * @ param key the name of the property which may belong to the target class */ public void unregisterJsonValueProcessor ( Class beanClass , String key ) { } }
if ( beanClass != null && key != null ) { beanKeyMap . remove ( beanClass , key ) ; }
public class Encodings { /** * Determines if the encoding specified was recognized by the * serializer or not . * @ param encoding The encoding * @ return boolean - true if the encoding was recognized else false */ public static boolean isRecognizedEncoding ( String encoding ) { } }
EncodingInfo ei ; String normalizedEncoding = encoding . toUpperCase ( ) ; ei = ( EncodingInfo ) _encodingTableKeyJava . get ( normalizedEncoding ) ; if ( ei == null ) ei = ( EncodingInfo ) _encodingTableKeyMime . get ( normalizedEncoding ) ; if ( ei != null ) return true ; return false ;
public class BulkMigrateChangelogCommand { /** * - - - - - private methods - - - - - */ private void handleObject ( final GraphObject obj ) { } }
final PropertyContainer propertyContainer = obj . getPropertyContainer ( ) ; final String changeLogName = "structrChangeLog" ; if ( propertyContainer . hasProperty ( changeLogName ) ) { final Object changeLogSource = propertyContainer . getProperty ( changeLogName ) ; if ( changeLogSource instanceof String ) { final String existingChangeLog = ( String ) changeLogSource ; if ( StringUtils . isNotBlank ( existingChangeLog ) ) { if ( writeChangelogToDisk ( obj , existingChangeLog ) ) { // remove data in case of success propertyContainer . removeProperty ( changeLogName ) ; } } } }
public class RepositoryApplicationConfiguration { /** * { @ link JpaSoftwareModuleManagement } bean . * @ return a new { @ link SoftwareModuleManagement } */ @ Bean @ ConditionalOnMissingBean SoftwareModuleManagement softwareModuleManagement ( final EntityManager entityManager , final DistributionSetRepository distributionSetRepository , final SoftwareModuleRepository softwareModuleRepository , final SoftwareModuleMetadataRepository softwareModuleMetadataRepository , final SoftwareModuleTypeRepository softwareModuleTypeRepository , final NoCountPagingRepository criteriaNoCountDao , final AuditorAware < String > auditorProvider , final ArtifactManagement artifactManagement , final QuotaManagement quotaManagement , final VirtualPropertyReplacer virtualPropertyReplacer , final JpaProperties properties ) { } }
// All collaborators are Spring-injected; only the database vendor is extracted
// from the JPA properties before constructing the JPA-backed implementation.
return new JpaSoftwareModuleManagement ( entityManager , distributionSetRepository , softwareModuleRepository , softwareModuleMetadataRepository , softwareModuleTypeRepository , criteriaNoCountDao , auditorProvider , artifactManagement , quotaManagement , virtualPropertyReplacer , properties . getDatabase ( ) ) ;
public class UriSourceSupplier { /** * Validates that { @ link URI } expressed as { @ link String } is of proper * format and could be converted to an instance of the { @ link URI } . */ public static boolean isURI ( String source ) { } }
Pattern pattern = Pattern . compile ( "^[a-zA-Z0-9\\-_]+:" ) ; return pattern . matcher ( source ) . find ( ) ;
public class JSONObject { /** * Get the int value associated with a key . * @ param key * A key string . * @ return The integer value . * @ throws JSONException * if the key is not found or if the value cannot be converted * to an integer . */ public int getInt ( String key ) throws JSONException { } }
Object object = this . get ( key ) ; try { return object instanceof Number ? ( ( Number ) object ) . intValue ( ) : Integer . parseInt ( ( String ) object ) ; } catch ( Exception e ) { throw new JSONException ( "JSONObject[" + quote ( key ) + "] is not an int." ) ; }
public class Timestamp { /** * Returns a timestamp relative to this one by the given number of minutes . * This method always returns a Timestamp with at least MINUTE precision . * For example , adding one minute to { @ code 2011T } results in * { @ code 2011-01-01T00:01-00:00 } . To receive a Timestamp that always * maintains the same precision as the original , use { @ link # adjustMinute ( int ) } . * @ param amount a number of minutes . */ public final Timestamp addMinute ( int amount ) { } }
long delta = ( long ) amount * 60 * 1000 ; return addMillisForPrecision ( delta , Precision . MINUTE , false ) ;
public class HttpUtil { /** * Fetch charset from Content - Type header value . * @ param contentTypeValue Content - Type header value to parse * @ return the charset from message ' s Content - Type header or { @ link CharsetUtil # ISO _ 8859_1} * if charset is not presented or unparsable */ public static Charset getCharset ( CharSequence contentTypeValue ) { } }
if ( contentTypeValue != null ) { return getCharset ( contentTypeValue , CharsetUtil . ISO_8859_1 ) ; } else { return CharsetUtil . ISO_8859_1 ; }
public class DescribeConfigRulesResult { /** * The details about your AWS Config rules . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setConfigRules ( java . util . Collection ) } or { @ link # withConfigRules ( java . util . Collection ) } if you want to * override the existing values . * @ param configRules * The details about your AWS Config rules . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeConfigRulesResult withConfigRules ( ConfigRule ... configRules ) { } }
if ( this . configRules == null ) { setConfigRules ( new com . amazonaws . internal . SdkInternalList < ConfigRule > ( configRules . length ) ) ; } for ( ConfigRule ele : configRules ) { this . configRules . add ( ele ) ; } return this ;
public class GenericCsvInputFormat { @ Override public void open ( FileInputSplit split ) throws IOException { } }
// Opens the split via the superclass, then builds one FieldParser per non-null
// entry in fieldTypes (null entries are skipped fields and get no parser).
// Each parser is given the configured charset; string parsers additionally get
// quoted-string handling when quotedStringParsing is enabled. Finally, if this
// split starts at byte 0 and skipFirstLineAsHeader is set, the header line is
// read and discarded so it is not emitted as a record.
super . open ( split ) ; // instantiate the parsers FieldParser < ? > [ ] parsers = new FieldParser < ? > [ fieldTypes . length ] ; for ( int i = 0 ; i < fieldTypes . length ; i ++ ) { if ( fieldTypes [ i ] != null ) { Class < ? extends FieldParser < ? > > parserType = FieldParser . getParserForType ( fieldTypes [ i ] ) ; if ( parserType == null ) { throw new RuntimeException ( "No parser available for type '" + fieldTypes [ i ] . getName ( ) + "'." ) ; } FieldParser < ? > p = InstantiationUtil . instantiate ( parserType , FieldParser . class ) ; p . setCharset ( getCharset ( ) ) ; if ( this . quotedStringParsing ) { if ( p instanceof StringParser ) { ( ( StringParser ) p ) . enableQuotedStringParsing ( this . quoteCharacter ) ; } else if ( p instanceof StringValueParser ) { ( ( StringValueParser ) p ) . enableQuotedStringParsing ( this . quoteCharacter ) ; } } parsers [ i ] = p ; } } this . fieldParsers = parsers ; // skip the first line , if we are at the beginning of a file and have the option set if ( this . skipFirstLineAsHeader && this . splitStart == 0 ) { readLine ( ) ; // read and ignore }
public class GenericResponseBuilder { /** * Replaces all of the headers with the these headers . * @ param headers the new headers to be used , { @ code null } to remove all existing headers * @ return this builder */ public GenericResponseBuilder < T > replaceAll ( MultivaluedMap < String , Object > headers ) { } }
rawBuilder . replaceAll ( headers ) ; return this ;
public class MapMakerInternalMap { /** * Guarded By Segment . this */ @ VisibleForTesting ValueReference < K , V > newValueReference ( ReferenceEntry < K , V > entry , V value ) { } }
int hash = entry . getHash ( ) ; return valueStrength . referenceValue ( segmentFor ( hash ) , entry , value ) ;
public class ResolutionPreference { /** * Parses a specific textual representation of a resolution and returns its dimensions . * @ param context * The context , which should be used , as an instance of the class { @ link Context } . The * context may not be null * @ param resolution * The textual representation of the resolution , which should be parsed , as a { @ link * String } . The resolution may neither be null , nor empty * @ return A pair , which contains the width and height of the given resolution , as an instance * of the class { @ link Pair } */ public static Pair < Integer , Integer > parseResolution ( @ NonNull final Context context , @ NonNull final String resolution ) { } }
Condition . INSTANCE . ensureNotNull ( context , "The context may not be null" ) ; Condition . INSTANCE . ensureNotNull ( resolution , "The resolution may not be null" ) ; Condition . INSTANCE . ensureNotEmpty ( resolution , "The resolution may not be empty" ) ; String separator = context . getString ( R . string . resolution_preference_separator ) ; String [ ] dimensions = resolution . split ( separator ) ; if ( dimensions . length != 2 ) { throw new IllegalArgumentException ( "Malformed resolution: " + resolution ) ; } try { int width = Integer . parseInt ( dimensions [ 0 ] ) ; int height = Integer . parseInt ( dimensions [ 1 ] ) ; return Pair . create ( width , height ) ; } catch ( NumberFormatException e ) { throw new IllegalArgumentException ( "Resolution contains invalid dimension: " + resolution , e ) ; }
public class SystemParameter { /** * < pre > * Define the URL query parameter name to use for the parameter . It is case * sensitive . * < / pre > * < code > string url _ query _ parameter = 3 ; < / code > */ public com . google . protobuf . ByteString getUrlQueryParameterBytes ( ) { } }
java . lang . Object ref = urlQueryParameter_ ; if ( ref instanceof java . lang . String ) { com . google . protobuf . ByteString b = com . google . protobuf . ByteString . copyFromUtf8 ( ( java . lang . String ) ref ) ; urlQueryParameter_ = b ; return b ; } else { return ( com . google . protobuf . ByteString ) ref ; }
public class UpdateCsvClassifierRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateCsvClassifierRequest updateCsvClassifierRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Marshals each request field against its pre-declared binding constant; the
// protocol marshaller skips null values, so optional fields need no guards.
// Any failure is wrapped in an SdkClientException, consistent with the other
// generated marshallers in this SDK.
if ( updateCsvClassifierRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateCsvClassifierRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( updateCsvClassifierRequest . getDelimiter ( ) , DELIMITER_BINDING ) ; protocolMarshaller . marshall ( updateCsvClassifierRequest . getQuoteSymbol ( ) , QUOTESYMBOL_BINDING ) ; protocolMarshaller . marshall ( updateCsvClassifierRequest . getContainsHeader ( ) , CONTAINSHEADER_BINDING ) ; protocolMarshaller . marshall ( updateCsvClassifierRequest . getHeader ( ) , HEADER_BINDING ) ; protocolMarshaller . marshall ( updateCsvClassifierRequest . getDisableValueTrimming ( ) , DISABLEVALUETRIMMING_BINDING ) ; protocolMarshaller . marshall ( updateCsvClassifierRequest . getAllowSingleColumn ( ) , ALLOWSINGLECOLUMN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class POJOHelper { /** * Sets common structure to JSON objects for specific elements * " key " : " myKey2 " , * " keyType " : " java . lang . String " , * " value " : " java . lang . String " * @ param obj objects to be added here * @ return OrderedJSONObject */ private static OrderedJSONObject setCommonKeyTypeObject ( OrderedJSONObject obj ) { } }
// Insertion order matters: OrderedJSONObject preserves it in the serialized output.
obj . put ( "key" , "myKey2" ) ; obj . put ( "keyType" , "java.lang.String" ) ; obj . put ( "value" , "java.lang.String" ) ; return obj ;
public class LibraryPackageExporter { /** * Does a refreshBundles call and waits for the async operation to complete before returning . */ private void refresh ( Collection < Bundle > bundles ) { } }
if ( FrameworkState . isStopping ( ) ) { // do nothing ; system is shutting down removal pendings will be removed automatically return ; } Bundle system = context . getBundle ( Constants . SYSTEM_BUNDLE_LOCATION ) ; FrameworkWiring fwkWiring = system . adapt ( FrameworkWiring . class ) ; final CountDownLatch refreshed = new CountDownLatch ( 1 ) ; fwkWiring . refreshBundles ( bundles , new FrameworkListener ( ) { @ Override public void frameworkEvent ( FrameworkEvent event ) { refreshed . countDown ( ) ; } } ) ; try { // only wait for 30 seconds refreshed . await ( 30 , TimeUnit . SECONDS ) ; } catch ( InterruptedException e ) { // not really expected ; auto - FFDC is ok // keep thread interrupted Thread . interrupted ( ) ; }
public class Error { /** * Thrown when the xml configuration doesn ' t exist . * @ param destination destination class name * @ param source source class name */ public static void classesNotConfigured ( Class < ? > destination , Class < ? > source ) { } }
// Always throws: builds the localized "mapping not found" message from the two
// simple class names via the MSG message catalog. Callers use this purely for
// its exception side effect.
throw new MappingNotFoundException ( MSG . INSTANCE . message ( Constants . mappingNotFoundException2 , destination . getSimpleName ( ) , source . getSimpleName ( ) ) ) ;
public class BufferUtil { /** * Get the array from a read - only { @ link ByteBuffer } similar to { @ link ByteBuffer # array ( ) } . * @ param buffer that wraps the underlying array . * @ return the underlying array . */ public static byte [ ] array ( final ByteBuffer buffer ) { } }
// Direct buffers have no backing array, so reject them up front. For heap
// buffers the backing array is read straight from the buffer's internal "hb"
// field via Unsafe, bypassing the ReadOnlyBufferException that array() would
// throw on a read-only view.
// NOTE(review): BYTE_BUFFER_HB_FIELD_OFFSET is presumably computed elsewhere
// from HeapByteBuffer's "hb" field — confirm it stays valid across JDK versions.
if ( buffer . isDirect ( ) ) { throw new IllegalArgumentException ( "buffer must wrap an array" ) ; } return ( byte [ ] ) UNSAFE . getObject ( buffer , BYTE_BUFFER_HB_FIELD_OFFSET ) ;
public class RestController { /** * Notifies all waiting signal work items of the given workflow instance and the given signal name . * Technically , sets the work item ' s result value and updates the status to EXECUTED . * < pre > * Request : POST / workflowInstance / 1 / signal / invoice { argument : { refNum : 3 , invoiceAmount : " 10 Euro " } } * Response : NO _ CONTENT * < / pre > */ @ RequestMapping ( method = RequestMethod . POST , value = "/workflowInstance/{woinRefNum}/signal/{signal}" , produces = {
// The raw JSON request body is deserialized here rather than bound by Spring,
// then handed to the facade; a successful send always yields 204 No Content.
MediaType . APPLICATION_JSON_VALUE , MediaType . TEXT_XML_VALUE } ) public ResponseEntity < Void > sendSignal ( @ PathVariable long woinRefNum , @ PathVariable String signal , @ RequestBody String argument ) { facade . sendSignalToWorkflowInstance ( woinRefNum , signal , JsonUtil . deserialize ( argument ) ) ; return new ResponseEntity < > ( HttpStatus . NO_CONTENT ) ;