signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class IdleConnectionRemover { /** * Set the executor service
* @ param v The value */
public void setExecutorService ( ExecutorService v ) { } } | if ( v != null ) { executorService = v ; isExternal = true ; } else { executorService = null ; isExternal = false ; } |
public class AzureAffinityGroupSupport { /** * Creates an affinity group in the cloud
* @ param options the options used when creating the affinity group
* @ return the provider ID of the affinity group
* @ throws org . dasein . cloud . InternalException an error occurred within the Dasein Cloud implementation creating the affinity group
* @ throws org . dasein . cloud . CloudException an error occurred within the service provider creating the affinity group */
@ Nonnull @ Override public AffinityGroup create ( @ Nonnull AffinityGroupCreateOptions options ) throws InternalException , CloudException { } } | if ( options == null || options . getName ( ) == null ) throw new InternalException ( "Cannot create AffinityGroup. Create options or affinity group name cannot be null." ) ; CreateAffinityGroupModel createAffinityGroupModel = new CreateAffinityGroupModel ( ) ; createAffinityGroupModel . setName ( options . getName ( ) ) ; createAffinityGroupModel . setDescription ( options . getDescription ( ) ) ; createAffinityGroupModel . setLocation ( provider . getContext ( ) . getRegionId ( ) ) ; createAffinityGroupModel . setLabel ( new String ( Base64 . encodeBase64 ( options . getName ( ) . getBytes ( ) ) ) ) ; AzureMethod azureMethod = new AzureMethod ( this . provider ) ; try { azureMethod . post ( RESOURCE_AFFINITYGROUPS , createAffinityGroupModel ) ; } catch ( JAXBException e ) { logger . error ( e . getMessage ( ) ) ; throw new InternalException ( e ) ; } return AffinityGroup . getInstance ( createAffinityGroupModel . getName ( ) , createAffinityGroupModel . getName ( ) , createAffinityGroupModel . getDescription ( ) , createAffinityGroupModel . getLocation ( ) , null ) ; |
public class DatastreamReferencedContent { /** * Gets the external content manager which is used for the retrieval of
* content .
* @ return an instance of < code > ExternalContentManager < / code >
* @ throws Exception is thrown in case the server is not able to find the module . */
private ExternalContentManager getExternalContentManager ( ) throws Exception { } } | if ( s_ecm == null ) { Server server ; try { server = Server . getInstance ( new File ( Constants . FEDORA_HOME ) , false ) ; s_ecm = ( ExternalContentManager ) server . getModule ( "org.fcrepo.server.storage.ExternalContentManager" ) ; } catch ( InitializationException e ) { throw new Exception ( "Unable to get ExternalContentManager Module: " + e . getMessage ( ) , e ) ; } } return s_ecm ; |
public class MasterPlaylistSettings { /** * Sets the refreshType value for this MasterPlaylistSettings .
* @ param refreshType * Indicates how the master playlist gets refreshed . This field
* is optional and defaults to { @ link
* RefreshType # AUTOMATIC } . */
public void setRefreshType ( com . google . api . ads . admanager . axis . v201902 . RefreshType refreshType ) { } } | this . refreshType = refreshType ; |
public class Enhancements { /** * Returns a { @ link Collection } of { @ link EntityAnnotation } s which confidences values are greater than or equal
* to the value passed by parameter
* @ param confidenceValue Threshold confidence value
* @ return */
public Collection < EntityAnnotation > getEntityAnnotationsByConfidenceValue ( final Double confidenceValue ) { } } | return FluentIterable . from ( getEntityAnnotations ( ) ) . filter ( new Predicate < EntityAnnotation > ( ) { @ Override public boolean apply ( EntityAnnotation e ) { return e . confidence . doubleValue ( ) >= confidenceValue . doubleValue ( ) ; } } ) . toList ( ) ; |
public class AmazonRoute53DomainsClient { /** * Checks whether a domain name can be transferred to Amazon Route 53.
* @ param checkDomainTransferabilityRequest
* The CheckDomainTransferability request contains the following elements .
* @ return Result of the CheckDomainTransferability operation returned by the service .
* @ throws InvalidInputException
* The requested item is not acceptable . For example , for an OperationId it might refer to the ID of an
* operation that is already completed . For a domain name , it might not be a valid domain name or belong to
* the requester account .
* @ throws UnsupportedTLDException
* Amazon Route 53 does not support this top - level domain ( TLD ) .
* @ sample AmazonRoute53Domains . CheckDomainTransferability
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53domains - 2014-05-15 / CheckDomainTransferability "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public CheckDomainTransferabilityResult checkDomainTransferability ( CheckDomainTransferabilityRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCheckDomainTransferability ( request ) ; |
public class MethodPod { /** * Return the local method for this pod method , when the pod is on the
* same jvm . */
private MethodRefAmp findLocalMethod ( ) { } } | ServiceRefAmp serviceRefLocal = _serviceRef . getLocalService ( ) ; if ( serviceRefLocal == null ) { return null ; } if ( _type != null ) { return serviceRefLocal . methodByName ( _name , _type ) ; } else { return serviceRefLocal . methodByName ( _name ) ; } |
public class AnnivMasterScreen { /** * Add all the screen listeners . */
public void addListeners ( ) { } } | super . addListeners ( ) ; this . getMainRecord ( ) . getField ( AnnivMaster . CALENDAR_CATEGORY_ID ) . addListener ( new InitFieldHandler ( this . getRecord ( CalendarControl . CALENDAR_CONTROL_FILE ) . getField ( CalendarControl . ANNIVERSARY_CATEGORY_ID ) ) ) ; |
public class NodeTypeDataValidator { /** * Check according the JSR - 170 */
private void validateNodeType ( NodeTypeData nodeType ) throws RepositoryException { } } | if ( nodeType == null ) { throw new RepositoryException ( "NodeType object " + nodeType + " is null" ) ; } if ( nodeType . getName ( ) == null ) { throw new RepositoryException ( "NodeType implementation class " + nodeType . getClass ( ) . getName ( ) + " is not supported in this method" ) ; } for ( InternalQName sname : nodeType . getDeclaredSupertypeNames ( ) ) { if ( ! nodeType . getName ( ) . equals ( Constants . NT_BASE ) && nodeType . getName ( ) . equals ( sname ) ) { throw new RepositoryException ( "Invalid super type name" + sname . getAsString ( ) ) ; } } for ( PropertyDefinitionData pdef : nodeType . getDeclaredPropertyDefinitions ( ) ) { if ( ! pdef . getDeclaringNodeType ( ) . equals ( nodeType . getName ( ) ) ) { throw new RepositoryException ( "Invalid declared node type in property definitions with name " + pdef . getName ( ) . getAsString ( ) + " not registred" ) ; } // validate default values
try { validateValueDefaults ( pdef . getRequiredType ( ) , pdef . getDefaultValues ( ) ) ; } catch ( ValueFormatException e ) { throw new ValueFormatException ( "Default value is incompatible with Property type " + PropertyType . nameFromValue ( pdef . getRequiredType ( ) ) + " of " + pdef . getName ( ) . getAsString ( ) + " in nodetype " + nodeType . getName ( ) . getAsString ( ) , e ) ; } try { validateValueConstraints ( pdef . getRequiredType ( ) , pdef . getValueConstraints ( ) ) ; } catch ( ValueFormatException e ) { throw new ValueFormatException ( "Constraints is incompatible with Property type " + PropertyType . nameFromValue ( pdef . getRequiredType ( ) ) + " of " + pdef . getName ( ) . getAsString ( ) + " in nodetype " + nodeType . getName ( ) . getAsString ( ) , e ) ; } } for ( NodeDefinitionData cndef : nodeType . getDeclaredChildNodeDefinitions ( ) ) { if ( ! cndef . getDeclaringNodeType ( ) . equals ( nodeType . getName ( ) ) ) { throw new RepositoryException ( "Invalid declared node type in child node definitions with name " + cndef . getName ( ) . getAsString ( ) + " not registred" ) ; } } |
public class VolumeBindingUtil { /** * Resolves relative paths in the supplied { @ code bindingString } , and returns a binding string that has relative
* paths replaced with absolute paths . If the supplied { @ code bindingString } does not contain a relative path , it
* is returned unmodified .
* < h3 > Discussion : < / h3 >
* Volumes may be defined inside of { @ code service } blocks < a href = " https : / / docs . docker . com / compose / compose - file / compose - file - v2 / # volumes - volume _ driver " >
* as documented here < / a > :
* < pre >
* volumes :
* # Just specify a path and let the Engine create a volume
* - / var / lib / mysql
* # Specify an absolute path mapping
* - / opt / data : / var / lib / mysql
* # Path on the host , relative to the Compose file
* - . / cache : / tmp / cache
* # User - relative path
* - ~ / configs : / etc / configs / : ro
* # Named volume
* - datavolume : / var / lib / mysql "
* < / pre >
* This method only operates on volume strings that are relative : beginning with { @ code . / } , { @ code . . / } , or
* { @ code ~ } . Relative paths beginning with { @ code . / } or { @ code . . / } are absolutized relative to the supplied
* { @ code baseDir } , which < em > must < / em > be absolute . Paths beginning with { @ code ~ } are interpreted relative to
* { @ code new File ( System . getProperty ( " user . home " ) ) } , and { @ code baseDir } is ignored .
* Volume strings that do not begin with a { @ code . / } , { @ code . . / } , or { @ code ~ } are returned as - is .
* < h3 > Examples : < / h3 >
* Given { @ code baseDir } equal to " / path / to / basedir " and a { @ code bindingString } string equal to
* " . / reldir : / some / other / dir " , this method returns { @ code / path / to / basedir / reldir : / some / other / dir }
* Given { @ code baseDir } equal to " / path / to / basedir " and a { @ code bindingString } string equal to
* " . . / reldir : / some / other / dir " , this method returns { @ code / path / to / reldir : / some / other / dir }
* Given { @ code baseDir } equal to " / path / to / basedir " and a { @ code bindingString } string equal to
* " ~ / reldir : / some / other / dir " , this method returns { @ code / home / user / reldir : / some / other / dir }
* Given { @ code baseDir } equal to " / path / to / basedir " and a { @ code bindingString } equal to
* " src / test / docker : / some / other / dir " , this method returns { @ code / path / to / basedir / src / test / docker : / some / other / dir }
* Given a { @ code bindingString } equal to " foo : / some / other / dir " , this method returns { @ code foo : / some / other / dir } ,
* because { @ code foo } is considered to be a < em > named volume < / em > , not a relative path .
* @ param baseDir the base directory used to resolve relative paths ( e . g . beginning with { @ code . / } , { @ code . . / } ,
* { @ code ~ } ) present in the { @ code bindingString } ; < em > must < / em > be absolute
* @ param bindingString the volume string from the docker - compose file
* @ return the volume string , with any relative paths resolved as absolute paths
* @ throws IllegalArgumentException if the supplied { @ code baseDir } is not absolute */
public static String resolveRelativeVolumeBinding ( File baseDir , String bindingString ) { } } | // a ' services : ' - > service - > ' volumes : ' may be formatted as :
// ( https : / / docs . docker . com / compose / compose - file / compose - file - v2 / # volumes - volume _ driver )
// volumes :
// # Just specify a path and let the Engine create a volume
// - / var / lib / mysql
// # Specify an absolute path mapping
// - / opt / data : / var / lib / mysql
// # Path on the host , relative to the Compose file
// - . / cache : / tmp / cache
// # User - relative path
// - ~ / configs : / etc / configs / : ro
// # Named volume
// - datavolume : / var / lib / mysql
String [ ] pathParts = bindingString . split ( ":" ) ; String localPath = pathParts [ 0 ] ; if ( isRelativePath ( localPath ) ) { File resolvedFile ; if ( isUserHomeRelativePath ( localPath ) ) { resolvedFile = resolveAbsolutely ( prepareUserHomeRelativePath ( localPath ) , System . getProperty ( "user.home" ) ) ; } else { if ( ! baseDir . isAbsolute ( ) ) { throw new IllegalArgumentException ( "Base directory '" + baseDir + "' must be absolute." ) ; } resolvedFile = resolveAbsolutely ( localPath , baseDir . getAbsolutePath ( ) ) ; } try { localPath = resolvedFile . getCanonicalFile ( ) . getAbsolutePath ( ) ; } catch ( IOException e ) { throw new RuntimeException ( "Unable to canonicalize '" + resolvedFile + "'" ) ; } } if ( pathParts . length > 1 ) { pathParts [ 0 ] = localPath ; return join ( ":" , pathParts ) ; } return localPath ; |
public class JCasUtil2 { /** * Returns token at the given position
* @ param jCas jCas
* @ param begin token begin position
* @ return Token or null */
public static Token findTokenByBeginPosition ( JCas jCas , int begin ) { } } | for ( Token token : JCasUtil . select ( getInitialView ( jCas ) , Token . class ) ) { if ( token . getBegin ( ) == begin ) { return token ; } } return null ; |
public class HttpContext { /** * Execute a POST call against the partial URL .
* @ param partialUrl The partial URL to build
* @ param payload The object to use for the POST
* @ return The response to the POST */
public Optional < Response > POST ( String partialUrl , Object payload ) { } } | URI uri = buildUri ( partialUrl ) ; return executePostRequest ( uri , payload ) ; |
public class TextHelper { /** * Get a copy of this object with the specified locales . The default locale is
* copied .
* @ param aMLT
* The initial multilingual text . May not be < code > null < / code > .
* @ param aContentLocales
* The list of locales of which the strings are desired . May not be
* < code > null < / code > .
* @ return The object containing only the texts of the given locales . Never
* < code > null < / code > . */
@ Nonnull @ ReturnsMutableCopy public static MultilingualText getCopyWithLocales ( @ Nonnull final IMultilingualText aMLT , @ Nonnull final Collection < Locale > aContentLocales ) { } } | final MultilingualText ret = new MultilingualText ( ) ; for ( final Locale aConrentLocale : aContentLocales ) if ( aMLT . texts ( ) . containsKey ( aConrentLocale ) ) ret . setText ( aConrentLocale , aMLT . getText ( aConrentLocale ) ) ; return ret ; |
public class RaftNetworkClient { /** * Start the { @ code RaftNetworkClient } .
* Following a call to { @ code start ( ) } the network threads
* will begin to do work . This means that the server component
* will accept and service incoming connections . Likewise , the
* the client component will attempt to establish outgoing
* connections to other Raft servers . As a result the caller
* no longer has exclusive access to underlying system resources .
* Once a successful call to { @ code start ( ) } is made subsequent calls are noops . */
public synchronized void start ( ) { } } | if ( running ) { return ; } LOGGER . info ( "{}: starting network client" , self . getId ( ) ) ; checkNotNull ( server ) ; checkNotNull ( client ) ; // IMPORTANT : set running _ early , up here _
// this is because the bind ( ) and connect ( )
// calls below will not succeed if their
// callbacks see " running " as false
running = true ; SocketAddress bindAddress = getResolvedBindAddress ( ) ; serverChannel = server . bind ( bindAddress ) ; for ( RaftMember server : cluster . values ( ) ) { connect ( server ) ; } |
public class Symm { /** * Create an encrypted password , making sure that even short passwords have a minimum length .
* @ param password
* @ param os
* @ throws IOException */
public void enpass ( final String password , final OutputStream os ) throws IOException { } } | final ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; DataOutputStream dos = new DataOutputStream ( baos ) ; byte [ ] bytes = password . getBytes ( ) ; if ( this . getClass ( ) . getSimpleName ( ) . startsWith ( "base64" ) ) { // don ' t expose randomization
dos . write ( bytes ) ; } else { Random r = new SecureRandom ( ) ; int start = 0 ; byte b ; for ( int i = 0 ; i < 3 ; ++ i ) { dos . writeByte ( b = ( byte ) r . nextInt ( ) ) ; start += Math . abs ( b ) ; } start %= 0x7 ; for ( int i = 0 ; i < start ; ++ i ) { dos . writeByte ( r . nextInt ( ) ) ; } dos . writeInt ( ( int ) System . currentTimeMillis ( ) ) ; int minlength = Math . min ( 0x9 , bytes . length ) ; dos . writeByte ( minlength ) ; // expect truncation
if ( bytes . length < 0x9 ) { for ( int i = 0 ; i < bytes . length ; ++ i ) { dos . writeByte ( r . nextInt ( ) ) ; dos . writeByte ( bytes [ i ] ) ; } // make sure it ' s long enough
for ( int i = bytes . length ; i < 0x9 ; ++ i ) { dos . writeByte ( r . nextInt ( ) ) ; } } else { dos . write ( bytes ) ; } } // 7/21/2016 jg add AES Encryption to the mix
exec ( new AESExec ( ) { @ Override public void exec ( AES aes ) throws IOException { CipherInputStream cis = aes . inputStream ( new ByteArrayInputStream ( baos . toByteArray ( ) ) , true ) ; try { encode ( cis , os ) ; } finally { os . flush ( ) ; cis . close ( ) ; } } } ) ; synchronized ( ENC ) { } |
public class DescribeScalableTargetsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeScalableTargetsRequest describeScalableTargetsRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( describeScalableTargetsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeScalableTargetsRequest . getServiceNamespace ( ) , SERVICENAMESPACE_BINDING ) ; protocolMarshaller . marshall ( describeScalableTargetsRequest . getResourceIds ( ) , RESOURCEIDS_BINDING ) ; protocolMarshaller . marshall ( describeScalableTargetsRequest . getScalableDimension ( ) , SCALABLEDIMENSION_BINDING ) ; protocolMarshaller . marshall ( describeScalableTargetsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( describeScalableTargetsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Strings { /** * Repeat a String { @ code repeat } times to form a new String .
* < pre >
* repeat ( null , 2 ) = null
* repeat ( " " , 0 ) = " "
* repeat ( " " , 2 ) = " "
* repeat ( " a " , 3 ) = " aaa "
* repeat ( " ab " , 2 ) = " abab "
* repeat ( " a " , - 2 ) = " "
* < / pre >
* @ param str the String to repeat , may be null
* @ param repeat number of times to repeat str , negative treated as zero
* @ return a new String consisting of the original String repeated , { @ code null } if null String
* input
* @ since 3.0 */
public static String repeat ( String str , int repeat ) { } } | if ( str == null ) return null ; if ( repeat <= 0 ) return Empty ; if ( repeat == 1 ) return str ; final int len = str . length ( ) ; final long longSize = ( long ) len * ( long ) repeat ; final int size = ( int ) longSize ; if ( size != longSize ) { throw new ArrayIndexOutOfBoundsException ( "Required array size too large: " + String . valueOf ( longSize ) ) ; } final char [ ] array = new char [ size ] ; str . getChars ( 0 , len , array , 0 ) ; int n ; for ( n = len ; n < size - n ; n <<= 1 ) { System . arraycopy ( array , 0 , array , n , n ) ; } System . arraycopy ( array , 0 , array , n , size - n ) ; return new String ( array ) ; |
public class SRTServletRequest { /** * If the session doesn ' t exist , then the Id that came
* in is invalid . If there is no sessionID in the request , then
* it ' s not valid . */
public boolean isRequestedSessionIdValid ( ) { } } | if ( WCCustomProperties . CHECK_REQUEST_OBJECT_IN_USE ) { checkRequestObjectInUse ( ) ; } // 321485
boolean sessionInvalid = _requestContext . isRequestedSessionIdValid ( ( ( WebAppDispatcherContext ) this . getDispatchContext ( ) ) . getWebApp ( ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { // 306998.15
logger . logp ( Level . FINE , CLASS_NAME , "isRequestedSessionIdValid" , " " + String . valueOf ( sessionInvalid ) ) ; } return sessionInvalid ; |
public class BibliographyFileReader { /** * Reads all items from an input stream and returns a provider
* serving these items . Note that you can supply an additional file
* name to help the method to determine the exact bibliography file format .
* If you don ' t know the file name you can pass null , but in this case the
* method ' s result might try to read the input stream using the wrong
* file format ( depending on the input stream ' s contents ) . Also note
* that the caller is responsible for closing the given input stream .
* @ param bibstream the input stream
* @ param filename the name of the input file ( can be null if you don ' t
* know the name )
* @ return the provider
* @ throws IOException if the input stream could not be read */
public ItemDataProvider readBibliographyFile ( InputStream bibstream , String filename ) throws IOException { } } | BufferedInputStream bis ; if ( bibstream instanceof BufferedInputStream ) { bis = ( BufferedInputStream ) bibstream ; } else { bis = new BufferedInputStream ( bibstream ) ; } // determine file format
FileFormat ff = determineFileFormat ( bis , filename ) ; // read stream
return readBibliographyFile ( bis , ff ) ; |
public class Mixer2Engine { /** * replace the reference of character entity reference to numeric character
* reference .
* @ param sb
* xhtml template
* @ return replaced xhtml template */
public StringBuilder replaceNamedEntity ( StringBuilder sb ) { } } | for ( NamedEntityEnum nEnum : NamedEntityEnum . values ( ) ) { int i ; while ( ( i = sb . indexOf ( nEnum . getName ( ) ) ) > - 1 ) { sb . replace ( i , i + nEnum . getName ( ) . length ( ) , nEnum . getNumber ( ) ) ; } } return sb ; |
public class DecimalFormat { /** * Formats a BigDecimal number . */
@ Override public StringBuffer format ( java . math . BigDecimal number , StringBuffer result , FieldPosition fieldPosition ) { } } | return format ( number , result , fieldPosition , false ) ; |
public class ExampleSection { /** * Selects an example . If there is an error instantiating the component , an error message will be displayed .
* @ param example the ExampleData of the example to select . */
public void selectExample ( final ExampleData example ) { } } | try { StringBuilder exampleName = new StringBuilder ( ) ; if ( example . getExampleGroupName ( ) != null && ! example . getExampleGroupName ( ) . equals ( "" ) ) { exampleName . append ( example . getExampleGroupName ( ) ) . append ( " - " ) ; } exampleName . append ( example . getExampleName ( ) ) ; selectExample ( example . getExampleClass ( ) . newInstance ( ) , exampleName . toString ( ) ) ; } catch ( Exception e ) { WMessages . getInstance ( this ) . error ( "Error selecting example \"" + example . getExampleName ( ) + '"' ) ; selectExample ( new ErrorComponent ( e . getMessage ( ) , e ) , "Error" ) ; } |
public class ClassifierKNearestNeighborsBow { /** * Finds the scene which most resembles the provided image
* @ param image Image that ' s to be classified
* @ return The index of the scene it most resembles */
public int classify ( T image ) { } } | if ( numNeighbors == 0 ) throw new IllegalArgumentException ( "Must specify number of neighbors!" ) ; // compute all the features inside the image
describe . process ( image ) ; // find which word the feature matches and construct a frequency histogram
featureToHistogram . reset ( ) ; List < Desc > imageFeatures = describe . getDescriptions ( ) ; for ( int i = 0 ; i < imageFeatures . size ( ) ; i ++ ) { Desc d = imageFeatures . get ( i ) ; featureToHistogram . addFeature ( d ) ; } featureToHistogram . process ( ) ; temp . histogram = featureToHistogram . getHistogram ( ) ; // Find the N most similar image histograms
resultsNN . reset ( ) ; search . findNearest ( temp , - 1 , numNeighbors , resultsNN ) ; // Find the most common scene among those neighbors
Arrays . fill ( scenes , 0 ) ; for ( int i = 0 ; i < resultsNN . size ; i ++ ) { NnData < HistogramScene > data = resultsNN . get ( i ) ; HistogramScene n = data . point ; // scenes [ n . type ] + + ;
scenes [ n . type ] += 1.0 / ( data . distance + 0.005 ) ; // todo
// scenes [ n . type ] + = 1.0 / ( Math . sqrt ( data . distance ) + 0.005 ) ; / / todo
} // pick the scene with the highest frequency
int bestIndex = 0 ; double bestCount = 0 ; for ( int i = 0 ; i < scenes . length ; i ++ ) { if ( scenes [ i ] > bestCount ) { bestCount = scenes [ i ] ; bestIndex = i ; } } return bestIndex ; |
public class ClientSupportImpl { /** * ( non - Javadoc )
* @ see com . ibm . ws . clientcontainer . remote . common . ClientSupport # listRemoteInstances ( java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . String ) */
@ Override public Collection < ? extends NameClassPair > listRemoteInstances ( String appName , String moduleName , String compName , String namespaceString , String nameInContext ) throws NamingException { } } | Collection < NameClassPair > allInstances = new HashSet < NameClassPair > ( ) ; NamingConstants . JavaColonNamespace namespace = NamingConstants . JavaColonNamespace . fromName ( namespaceString ) ; ComponentMetaData cmd = getCMD ( appName , moduleName , compName , namespace ) ; try { ComponentMetaDataAccessorImpl . getComponentMetaDataAccessor ( ) . beginContext ( cmd ) ; Iterator < RemoteJavaColonNamingHelper > remoteJCNHelpers = remoteJavaColonNamingHelpers . getServices ( ) ; while ( remoteJCNHelpers . hasNext ( ) ) { RemoteJavaColonNamingHelper helper = remoteJCNHelpers . next ( ) ; allInstances . addAll ( helper . listRemoteInstances ( namespace , nameInContext ) ) ; } } finally { ComponentMetaDataAccessorImpl . getComponentMetaDataAccessor ( ) . endContext ( ) ; } return allInstances ; |
public class ComponentEnhancer { /** * Store annotated method related to a lifecycle phase .
* @ param component the JRebirth component to manage
* @ param lifecycleMethod the map that store methods
* @ param annotationClass the annotation related to lifecycle phase */
private static void manageLifecycleAnnotation ( final Component < ? > component , final MultiMap < String , Method > lifecycleMethod , final Class < ? extends Annotation > annotationClass ) { } } | for ( final Method method : ClassUtility . getAnnotatedMethods ( component . getClass ( ) , annotationClass ) ) { // Add a method to the multimap entry
// TODO sort
lifecycleMethod . add ( annotationClass . getName ( ) , method ) ; } |
public class Vector { /** * Addition from two vectors */
public Vector add ( Vector b ) { } } | if ( ( b == null ) || ( size != b . size ) ) return null ; int i ; Vector result = new Vector ( size ) ; for ( i = 0 ; i < size ; i ++ ) result . vector [ i ] = vector [ i ] + b . vector [ i ] ; return result ; |
public class ServerDefaultHttpHandler { /** * ( non - Javadoc )
* @ see io . undertow . server . HttpHandler # handleRequest ( io . undertow . server . HttpServerExchange ) */
@ Override public void handleRequest ( final HttpServerExchange exchange ) throws Exception { } } | if ( this . defaultResponseListener != null ) { exchange . addDefaultResponseListener ( defaultResponseListener ) ; } long fiGlobal = this . headers . fastIterateNonEmpty ( ) ; while ( fiGlobal != - 1 ) { final HeaderValues headerValues = headers . fiCurrent ( fiGlobal ) ; exchange . getResponseHeaders ( ) . addAll ( headerValues . getHeaderName ( ) , headerValues ) ; fiGlobal = headers . fiNextNonEmpty ( fiGlobal ) ; } next . handleRequest ( exchange ) ; |
public class InsufficientOperationalNodesException { /** * Helper method to get a list of node ids .
* @ param nodeList */
private static List < Integer > stripNodeIds ( List < Node > nodeList ) { } } | List < Integer > nodeidList = new ArrayList < Integer > ( ) ; if ( nodeList != null ) { for ( Node node : nodeList ) { nodeidList . add ( node . getId ( ) ) ; } } return nodeidList ; |
public class ModelRegistry { /** * Finds the most specific models for the given { @ link Resource } , i . e . the
* first model ( s ) found when traversing the resource ' s
* { @ link MappableTypeHierarchy mappable hierarchy } .
* @ param resource must not be < code > null < / code > .
* @ return the model sources , or < code > null < / code > if no models exist for the resource . */
Collection < LookupResult > lookupMostSpecificModels ( Resource resource ) { } } | if ( resource == null ) { throw new IllegalArgumentException ( "Method argument resource must not be null." ) ; } final Key key = key ( resource ) ; if ( isUnmapped ( key ) ) { return null ; } Collection < LookupResult > sources = lookupFromCache ( key ) ; if ( sources == null ) { final int currentStateId = this . state . get ( ) ; sources = resolveMostSpecificModelSources ( resource ) ; if ( sources . isEmpty ( ) ) { markAsUnmapped ( key , currentStateId ) ; } else { cache ( key , sources , currentStateId ) ; } } return nullIfEmpty ( sources ) ; |
public class MicroMetaDao { /** * 锟斤拷锟斤拷锟揭筹拷锟绞硷拷锟铰嘉伙拷锟 � */
public int calcuStartIndex ( int pageNum , int onePageCount ) { } } | int startIndex = 0 ; String tempType = calcuDbType ( ) ; if ( tempType != null && tempType . equals ( "mysql" ) ) { // if ( dbType ! = null & & dbType . equals ( " mysql " ) ) {
startIndex = pageNum * onePageCount ; } else { startIndex = pageNum * onePageCount + 1 ; } return startIndex ; |
public class MemcachedClient { /** * Asynchronous CAS operation using the default transcoder .
* @ param key the key
* @ param casId the CAS identifier ( from a gets operation )
* @ param value the new value
* @ return a future that will indicate the status of the CAS
* @ throws IllegalStateException in the rare circumstance where queue is too
* full to accept any more requests */
@ Override public OperationFuture < CASResponse > asyncCAS ( String key , long casId , Object value ) { } } | return asyncCAS ( key , casId , value , transcoder ) ; |
public class SingleEvaluatedMoveCache { /** * Cache validation of the given move , discarding any previously cached value .
* @ param move move applied to the current solution
* @ param validation validation of obtained neighbour */
@ Override public final void cacheMoveValidation ( Move < ? > move , Validation validation ) { } } | validatedMove = move ; this . validation = validation ; |
public class ObjectCacheTwoLevelImpl { /** * Put object to session cache .
* @ param oid The { @ link org . apache . ojb . broker . Identity } of the object to cache
* @ param entry The { @ link org . apache . ojb . broker . cache . ObjectCacheTwoLevelImpl . CacheEntry } of the object
* @ param onlyIfNew Flag , if set < em > true < / em > only new objects ( not already in session cache ) be cached . */
private void putToSessionCache ( Identity oid , CacheEntry entry , boolean onlyIfNew ) { } } | if ( onlyIfNew ) { // no synchronization needed , because session cache was used per broker instance
if ( ! sessionCache . containsKey ( oid ) ) sessionCache . put ( oid , entry ) ; } else { sessionCache . put ( oid , entry ) ; } |
public class UserDeserializer { /** * Gson invokes this call - back method during deserialization when it encounters a field of the specified type .
* @ param element The Json data being deserialized
* @ param type The type of the Object to deserialize to
* @ param context The JSON deserialization context
* @ return The user */
@ Override public User deserialize ( JsonElement element , Type type , JsonDeserializationContext context ) throws JsonParseException { } } | JsonObject obj = element . getAsJsonObject ( ) ; JsonElement user = obj . get ( "user" ) ; if ( user != null && user . isJsonObject ( ) ) return gson . fromJson ( user , User . class ) ; return gson . fromJson ( element , User . class ) ; |
/**
 * Returns either a client proxy or {@link HTTPServerEndpoint} instance.
 *
 * <p>When a URI is configured, a client-side proxy is built; otherwise a server-side
 * endpoint exporting {@code clazz} (plus any additional interfaces) is built.
 *
 * @return building result
 */
@SuppressWarnings("unchecked")
public T get() {
    T result;
    if (uri != null) {
        // Client mode: build a proxy that forwards calls over HTTP to the given URI.
        if (classLoader == null) {
            // Default to the target interface's own class loader for proxy creation.
            classLoader = clazz.getClassLoader();
        }
        Endpoint clientEndpoint = new HTTPClientEndpoint(uri);
        if (logger != null) {
            // Optionally wrap the transport endpoint with logging.
            clientEndpoint = new LoggerEndpoint(clientEndpoint, logger);
        }
        result = new FacadeEndpoint<T>(clientEndpoint, classLoader, clazz, typeConverter,
                additionalInterfaces.toArray(new Class<?>[additionalInterfaces.size()])).newProxy();
    } else {
        // Server mode: export the server object under the main interface and any extras.
        ObjectEndpoint endpoint = new ObjectEndpoint();
        endpoint.faultMapper(faultMapper);
        endpoint.typeConverter(typeConverter);
        endpoint.export(server, clazz);
        for (Class<?> i : additionalInterfaces) {
            endpoint.export(server, i);
        }
        result = (T) new HTTPServerEndpoint(endpoint);
    }
    return result;
}
public class Service { /** * < pre >
* Configuration controlling usage of this service .
* < / pre >
* < code > . google . api . Usage usage = 15 ; < / code > */
public com . google . api . Usage getUsage ( ) { } } | return usage_ == null ? com . google . api . Usage . getDefaultInstance ( ) : usage_ ; |
public class FSDatasetDescriptor { /** * A helper to determine if the path description of this { @ link DatasetDescriptor } is a superset of paths
* accepted by the other { @ link DatasetDescriptor } . If the path description of the other { @ link DatasetDescriptor }
* is a glob pattern , we return false .
* @ param otherPath a glob pattern that describes a set of paths .
* @ return true if the glob pattern described by the otherPath matches the path in this { @ link DatasetDescriptor } . */
private boolean isPathContaining ( String otherPath ) { } } | if ( otherPath == null ) { return false ; } if ( DatasetDescriptorConfigKeys . DATASET_DESCRIPTOR_CONFIG_ANY . equals ( this . getPath ( ) ) ) { return true ; } if ( PathUtils . isGlob ( new Path ( otherPath ) ) ) { return false ; } GlobPattern globPattern = new GlobPattern ( this . getPath ( ) ) ; return globPattern . matches ( otherPath ) ; |
public class V1KnowledgeComponentImplementationModel { /** * { @ inheritDoc } */
@ Override public LoggersModel getLoggers ( ) { } } | if ( _loggers == null ) { _loggers = ( LoggersModel ) getFirstChildModel ( LOGGERS ) ; } return _loggers ; |
/**
 * For the given interface, get the stub implementation.
 * If this service has no port for the given interface,
 * then ServiceException is thrown.
 */
public java.rmi.Remote getPort(Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
    try {
        // Only one port is supported by this locator: the PremiumRateService SOAP binding.
        if (com.google.api.ads.admanager.axis.v201902.PremiumRateServiceInterface.class.isAssignableFrom(serviceEndpointInterface)) {
            com.google.api.ads.admanager.axis.v201902.PremiumRateServiceSoapBindingStub _stub =
                new com.google.api.ads.admanager.axis.v201902.PremiumRateServiceSoapBindingStub(
                    new java.net.URL(PremiumRateServiceInterfacePort_address), this);
            _stub.setPortName(getPremiumRateServiceInterfacePortWSDDServiceName());
            return _stub;
        }
    } catch (java.lang.Throwable t) {
        // Any failure while constructing the stub (e.g. malformed URL) is wrapped.
        throw new javax.xml.rpc.ServiceException(t);
    }
    // No matching port: report the requested interface in the error message.
    throw new javax.xml.rpc.ServiceException("There is no stub implementation for the interface: "
        + (serviceEndpointInterface == null ? "null" : serviceEndpointInterface.getName()));
}
public class ParameterUtil { /** * This is a utility method intended provided to help the JPA module . */
public static IQueryParameterAnd < ? > parseQueryParams ( FhirContext theContext , RestSearchParameterTypeEnum paramType , String theUnqualifiedParamName , List < QualifiedParamList > theParameters ) { } } | QueryParameterAndBinder binder = null ; switch ( paramType ) { case COMPOSITE : throw new UnsupportedOperationException ( ) ; case DATE : binder = new QueryParameterAndBinder ( DateAndListParam . class , Collections . < Class < ? extends IQueryParameterType > > emptyList ( ) ) ; break ; case NUMBER : binder = new QueryParameterAndBinder ( NumberAndListParam . class , Collections . < Class < ? extends IQueryParameterType > > emptyList ( ) ) ; break ; case QUANTITY : binder = new QueryParameterAndBinder ( QuantityAndListParam . class , Collections . < Class < ? extends IQueryParameterType > > emptyList ( ) ) ; break ; case REFERENCE : binder = new QueryParameterAndBinder ( ReferenceAndListParam . class , Collections . < Class < ? extends IQueryParameterType > > emptyList ( ) ) ; break ; case STRING : binder = new QueryParameterAndBinder ( StringAndListParam . class , Collections . < Class < ? extends IQueryParameterType > > emptyList ( ) ) ; break ; case TOKEN : binder = new QueryParameterAndBinder ( TokenAndListParam . class , Collections . < Class < ? extends IQueryParameterType > > emptyList ( ) ) ; break ; case URI : binder = new QueryParameterAndBinder ( UriAndListParam . class , Collections . < Class < ? extends IQueryParameterType > > emptyList ( ) ) ; break ; case HAS : binder = new QueryParameterAndBinder ( HasAndListParam . class , Collections . < Class < ? extends IQueryParameterType > > emptyList ( ) ) ; break ; } // FIXME null access
return binder . parse ( theContext , theUnqualifiedParamName , theParameters ) ; |
public class AbstractWebServer { /** * Set the { @ link PippoFilter } instance .
* This method call { @ link PippoFilter # setApplication ( Application ) } to end .
* @ param pippoFilter
* @ return */
@ Override public WebServer < T > setPippoFilter ( PippoFilter pippoFilter ) { } } | this . pippoFilter = pippoFilter ; pippoFilter . setApplication ( application ) ; return this ; |
public class RaidDFSUtil { /** * Returns the corrupt blocks in a file . */
public static List < LocatedBlock > corruptBlocksInFile ( DistributedFileSystem dfs , String path , long offset , long length ) throws IOException { } } | List < LocatedBlock > corrupt = new LinkedList < LocatedBlock > ( ) ; LocatedBlocks locatedBlocks = getBlockLocations ( dfs , path , offset , length ) ; for ( LocatedBlock b : locatedBlocks . getLocatedBlocks ( ) ) { if ( b . isCorrupt ( ) || ( b . getLocations ( ) . length == 0 && b . getBlockSize ( ) > 0 ) ) { corrupt . add ( b ) ; } } return corrupt ; |
public class ReflectiveInterceptor { /** * Implementation of java . lang . class . getDeclaredMethod ( String name , Class . . . params ) . */
@ UsedByGeneratedCode public static Method jlClassGetDeclaredMethod ( Class < ? > clazz , String name , Class < ? > ... params ) throws SecurityException , NoSuchMethodException { } } | ReloadableType rtype = getRType ( clazz ) ; if ( rtype == null ) { // Not reloadable . . .
return clazz . getDeclaredMethod ( name , params ) ; } else { // Reloadable
MethodProvider methods = MethodProvider . create ( rtype ) ; Invoker method = methods . getDeclaredMethod ( name , params ) ; if ( method == null ) { throw Exceptions . noSuchMethodException ( clazz , name , params ) ; } else { return method . createJavaMethod ( ) ; } } |
public class HttpBase { /** * 新增请求头 < br >
* 不覆盖原有请求头
* @ param headers 请求头
* @ return this
* @ since 4.0.3 */
public T addHeaders ( Map < String , String > headers ) { } } | if ( CollectionUtil . isEmpty ( headers ) ) { return ( T ) this ; } for ( Entry < String , String > entry : headers . entrySet ( ) ) { this . header ( entry . getKey ( ) , StrUtil . nullToEmpty ( entry . getValue ( ) ) , false ) ; } return ( T ) this ; |
public class Mapper { /** * Creates a map where the object at index N from the first Iterator is the key for the object at index N of the
* second Iterator . < br > By default discards both key and value if either one is null .
* @ param keys Iterator of keys
* @ param values Iterator of values
* @ return map */
public static Map zip ( Iterator keys , Iterator values ) { } } | return zip ( keys , values , false ) ; |
public class CreateSnapshotScheduleResult { /** * @ param nextInvocations
* @ return Returns a reference to this object so that method calls can be chained together . */
public CreateSnapshotScheduleResult withNextInvocations ( java . util . Collection < java . util . Date > nextInvocations ) { } } | setNextInvocations ( nextInvocations ) ; return this ; |
public class LogSignAlgebra { /** * Converts a real value to its compacted representation . */
@ Override public double fromReal ( double x ) { } } | long sign = POSITIVE ; if ( x < 0 ) { sign = NEGATIVE ; x = - x ; } return compact ( sign , FastMath . log ( x ) ) ; |
public class DividableGridAdapter { /** * Inflates the view , which is used to visualize an item .
* @ param parent
* The parent of the view , which should be inflated , as an instance of the class { @ link
* ViewGroup } or null , if no parent is available
* @ return The view , which has been inflated , as an instance of the class { @ link View } */
private View inflateItemView ( @ Nullable final ViewGroup parent ) { } } | LayoutInflater layoutInflater = LayoutInflater . from ( context ) ; View view = layoutInflater . inflate ( style == Style . GRID ? R . layout . grid_item : R . layout . list_item , parent , false ) ; ItemViewHolder viewHolder = new ItemViewHolder ( ) ; viewHolder . iconImageView = view . findViewById ( android . R . id . icon ) ; viewHolder . titleTextView = view . findViewById ( android . R . id . title ) ; view . setTag ( viewHolder ) ; return view ; |
/**
 * {@inheritDoc} check if given string is a valid BIC.
 *
 * @see javax.validation.ConstraintValidator#isValid(java.lang.Object,
 *      javax.validation.ConstraintValidatorContext)
 */
@Override
public final boolean isValid(final Object pvalue, final ConstraintValidatorContext pcontext) {
    final String valueAsString;
    if (ignoreWhitspaces) {
        // Strip all whitespace before validating; null becomes the empty string.
        valueAsString = Objects.toString(pvalue, StringUtils.EMPTY).replaceAll("\\s+", StringUtils.EMPTY);
    } else {
        valueAsString = Objects.toString(pvalue, null);
    }
    if (StringUtils.isEmpty(valueAsString)) {
        // empty field is ok
        return true;
    }
    if (valueAsString.length() != BIC_LENGTH_MIN && valueAsString.length() != BIC_LENGTH_MAX) {
        // too short or too long, but it's handled by the size validator!
        return true;
    }
    if (!valueAsString.matches(BIC_REGEX)) {
        // format is wrong!
        return false;
    }
    // Positions 4-5 of a BIC hold the ISO country code; the BIC is valid only if
    // that country is known to the IBAN length map.
    final String countryCode = valueAsString.substring(4, 6);
    final IbanLengthDefinition validBicLength = IBAN_LENGTH_MAP.ibanLengths().get(countryCode);
    return validBicLength != null;
}
public class CmsAliasTableController { /** * This method is called after the mode of an alias has been edited . < p >
* @ param row the edited row
* @ param mode the new alias mode */
public void editAliasMode ( CmsAliasTableRow row , CmsAliasMode mode ) { } } | row . setMode ( mode ) ; row . setEdited ( true ) ; |
public class TopicRefWriter { /** * Retrieve the element ID from the path . If there is no element ID , return topic ID . */
private String getElementID ( final String relativePath ) { } } | final String fragment = getFragment ( relativePath ) ; if ( fragment != null ) { if ( fragment . lastIndexOf ( SLASH ) != - 1 ) { return fragment . substring ( fragment . lastIndexOf ( SLASH ) + 1 ) ; } else { return fragment ; } } return null ; |
public class InMemoryBulkheadRegistry { /** * { @ inheritDoc } */
@ Override public Bulkhead bulkhead ( String name , BulkheadConfig config ) { } } | return computeIfAbsent ( name , ( ) -> Bulkhead . of ( name , Objects . requireNonNull ( config , CONFIG_MUST_NOT_BE_NULL ) ) ) ; |
public class StorageProviderFactoryImpl { /** * This method returns all of the registered storage accounts .
* @ return list of storage accounts */
@ Override public List < StorageAccount > getStorageAccounts ( ) { } } | List < StorageAccount > accts = new ArrayList < > ( ) ; Iterator < String > ids = getAccountManager ( ) . getStorageAccountIds ( ) ; while ( ids . hasNext ( ) ) { accts . add ( getAccountManager ( ) . getStorageAccount ( ids . next ( ) ) ) ; } return accts ; |
public class PropertiesUtils { /** * Converts a { @ link Properties } object to a { @ link Map } where each key is a { @ link String } . */
public static Map < String , ? > propsToStringKeyMap ( Properties properties ) { } } | ImmutableMap . Builder < String , Object > mapBuilder = ImmutableMap . builder ( ) ; for ( Map . Entry < Object , Object > entry : properties . entrySet ( ) ) { mapBuilder . put ( entry . getKey ( ) . toString ( ) , entry . getValue ( ) ) ; } return mapBuilder . build ( ) ; |
/**
 * Matches a node with the given parameters against this RenderTheme.
 *
 * @param renderCallback the callback implementation which will be executed on each match.
 * @param renderContext  the current rendering context
 * @param poi            the point of interest.
 */
public synchronized void matchNode(RenderCallback renderCallback, final RenderContext renderContext, PointOfInterest poi) {
    // Cache key combines the POI's tags and the current zoom level (nodes are never closed ways).
    MatchingCacheKey matchingCacheKey = new MatchingCacheKey(poi.tags, renderContext.rendererJob.tile.zoomLevel, Closed.NO);
    List<RenderInstruction> matchingList = this.poiMatchingCache.get(matchingCacheKey);
    if (matchingList != null) {
        // cache hit: replay the previously matched render instructions.
        for (int i = 0, n = matchingList.size(); i < n; ++i) {
            matchingList.get(i).renderNode(renderCallback, renderContext, poi);
        }
        return;
    }
    // cache miss: run the full rule match. Each rule both renders (via the callback)
    // and appends its instructions to matchingList, which is then cached for reuse.
    matchingList = new ArrayList<RenderInstruction>();
    for (int i = 0, n = this.rulesList.size(); i < n; ++i) {
        this.rulesList.get(i).matchNode(renderCallback, renderContext, matchingList, poi);
    }
    this.poiMatchingCache.put(matchingCacheKey, matchingList);
}
public class MemoryInfo { /** * Returns an estimation , in bytes , of the memory usage of the given objects plus ( recursively )
* objects referenced via non - static references from any of those objects via non - public fields . If
* two or more of the given objects reference the same Object X , then the memory used by Object X
* will only be counted once . However , the method guarantees that the memory for a given object
* ( either in the passed - in collection or found while traversing the object graphs from those
* objects ) will not be counted more than once . The estimate for each individual object is provided
* by the running JVM and is likely to be as accurate a measure as can be reasonably made by the
* running Java program . It will generally include memory taken up for " housekeeping " of that
* object .
* @ param objs The collection of objects whose memory usage is to be totalled .
* @ return An estimate , in bytes , of the total heap memory taken up by the obejcts in objs and ,
* recursively , objects referenced by private or protected ( non - static ) fields .
* @ throws IOException */
public static long deepMemoryUsageOfAll ( Instrumentation inst , final Collection < ? extends java . lang . Object > objs ) throws IOException { } } | return deepMemoryUsageOfAll ( inst , objs , NON_PUBLIC ) ; |
public class DefaultOverlayService { /** * { @ inheritDoc } */
@ Override public Boolean showOverlay ( JComponent targetComponent , JComponent overlay ) { } } | overlay . setVisible ( Boolean . TRUE ) ; return Boolean . TRUE ; |
public class VariableScopeImpl { /** * only called when a new variable is created on this variable scope . This method is also responsible for propagating the creation of this variable to the history . */
protected void createVariableLocal ( String variableName , Object value , ExecutionEntity sourceActivityExecution ) { } } | ensureVariableInstancesInitialized ( ) ; if ( variableInstances . containsKey ( variableName ) ) { throw new ActivitiException ( "variable '" + variableName + "' already exists. Use setVariableLocal if you want to overwrite the value" ) ; } createVariableInstance ( variableName , value , sourceActivityExecution ) ; |
/**
 * Perform the Unicode Bidi algorithm on a given paragraph, as defined in
 * <a href="http://www.unicode.org/unicode/reports/tr9/">Unicode Standard Annex #9</a>.
 *
 * <p>The RUN_DIRECTION attribute, if present, determines the base direction; otherwise
 * it is computed by the algorithm (defaulting to left-to-right). The BIDI_EMBEDDING
 * attribute, if present, supplies embedding levels: negative values -1..-62 are
 * overrides at the absolute level, positive values 1..62 are embeddings, and zero or
 * absent means the base level. The NUMERIC_SHAPING attribute, if present, converts
 * European digits before the algorithm runs. Both attributes, if present, must be
 * applied to all the text in the paragraph.
 *
 * @param paragraph a paragraph of text with optional character and
 *                  paragraph attribute information
 */
public void setPara(AttributedCharacterIterator paragraph) {
    // Determine the base paragraph level from the RUN_DIRECTION attribute,
    // or let the algorithm pick a default (LTR) when absent.
    byte paraLvl;
    Boolean runDirection = (Boolean) paragraph.getAttribute(TextAttribute.RUN_DIRECTION);
    if (runDirection == null) {
        paraLvl = LEVEL_DEFAULT_LTR;
    } else {
        paraLvl = (runDirection.equals(TextAttribute.RUN_DIRECTION_LTR)) ? LTR : RTL;
    }
    // lvls stays null unless at least one non-zero BIDI_EMBEDDING value is seen;
    // a null levels array tells setPara(char[],byte,byte[]) to compute levels itself.
    byte[] lvls = null;
    int len = paragraph.getEndIndex() - paragraph.getBeginIndex();
    byte[] embeddingLevels = new byte[len];
    char[] txt = new char[len];
    int i = 0;
    // Copy the characters and collect per-character embedding levels.
    // Note: getAttribute is evaluated at the iterator's current position on each step.
    char ch = paragraph.first();
    while (ch != AttributedCharacterIterator.DONE) {
        txt[i] = ch;
        Integer embedding = (Integer) paragraph.getAttribute(TextAttribute.BIDI_EMBEDDING);
        if (embedding != null) {
            byte level = embedding.byteValue();
            if (level == 0) {
                /* no-op: zero means "use the base level" */
            } else if (level < 0) {
                // Negative values are overrides at abs(level); mark with LEVEL_OVERRIDE.
                lvls = embeddingLevels;
                embeddingLevels[i] = (byte) ((0 - level) | LEVEL_OVERRIDE);
            } else {
                lvls = embeddingLevels;
                embeddingLevels[i] = level;
            }
        }
        ch = paragraph.next();
        ++i;
    }
    // Optionally shape European digits in place before running the algorithm.
    NumericShaper shaper = (NumericShaper) paragraph.getAttribute(TextAttribute.NUMERIC_SHAPING);
    if (shaper != null) {
        shaper.shape(txt, 0, len);
    }
    setPara(txt, paraLvl, lvls);
}
public class WVideo { /** * Creates dynamic URLs that the video clips can be loaded from . In fact the URL points to the main application
* servlet , but includes a non - null for the parameter associated with this WComponent ( ie , its label ) . The
* handleRequest method below detects this when the browser requests a file .
* @ return the urls to load the video files from , or null if there are no clips defined . */
public String [ ] getTrackUrls ( ) { } } | Track [ ] tracks = getTracks ( ) ; if ( tracks == null || tracks . length == 0 ) { return null ; } String [ ] urls = new String [ tracks . length ] ; // this variable needs to be set in the portlet environment .
String url = getEnvironment ( ) . getWServletPath ( ) ; Map < String , String > parameters = getBaseParameterMap ( ) ; for ( int i = 0 ; i < urls . length ; i ++ ) { parameters . put ( TRACK_INDEX_REQUEST_PARAM_KEY , String . valueOf ( i ) ) ; urls [ i ] = WebUtilities . getPath ( url , parameters , true ) ; } return urls ; |
/**
 * GO!
 *
 * <p>Dispatches the request to the handler registered for its path and HTTP method,
 * falling back to the not-found handler, and routing any thrown error to the error
 * handler.
 *
 * @param request    The request.
 * @param response   The response.
 * @param httpMethod the HTTP method of the request
 */
void doMethod(HttpServletRequest request, HttpServletResponse response, HttpMethod httpMethod) {
    // Locate a request handler:
    String requestPath = mapRequestPath(request);
    Route route = api.get(requestPath);
    try {
        if (route != null && route.requestHandlers.containsKey(httpMethod)) {
            handleRequest(request, response, route, httpMethod);
        } else {
            handleNotFound(request, response);
        }
    } catch (Throwable t) {
        // Chances are the exception we've actually caught is the reflection
        // one from Method.invoke(...) — unwrap it to report the real cause.
        Throwable caught = t;
        if (InvocationTargetException.class.isAssignableFrom(t.getClass())) {
            caught = t.getCause();
        }
        RequestHandler requestHandler = (route == null ? null : route.requestHandlers.get(httpMethod));
        handleError(request, response, requestHandler, caught);
    }
}
public class IO { /** * Writes an int value to a series of bytes . The values are written using
* < a href = " http : / / lucene . apache . org / core / 3_5_0 / fileformats . html # VInt " > variable - length < / a >
* < a href = " https : / / developers . google . com / protocol - buffers / docs / encoding ? csw = 1 # types " > zig - zag < / a >
* coding . Each { @ code int } value is written in 1 to 5 bytes .
* @ see # readInt ( DataInput )
* @ param value the integer value to write
* @ param out the data output the integer value is written to
* @ throws NullPointerException if the given data output is { @ code null }
* @ throws IOException if an I / O error occurs */
static void writeInt ( final int value , final DataOutput out ) throws IOException { } } | // Zig - zag encoding .
int n = ( value << 1 ) ^ ( value >> 31 ) ; if ( ( n & ~ 0x7F ) != 0 ) { out . write ( ( byte ) ( ( n | 0x80 ) & 0xFF ) ) ; n >>>= 7 ; if ( n > 0x7F ) { out . write ( ( byte ) ( ( n | 0x80 ) & 0xFF ) ) ; n >>>= 7 ; if ( n > 0x7F ) { out . write ( ( byte ) ( ( n | 0x80 ) & 0xFF ) ) ; n >>>= 7 ; if ( n > 0x7F ) { out . write ( ( byte ) ( ( n | 0x80 ) & 0xFF ) ) ; n >>>= 7 ; } } } } out . write ( ( byte ) n ) ; |
public class AbstractSubCodeBuilderFragment { /** * Replies the " an " or " a " article according to the given word .
* < p > This function does not follow the real English grammatical rule , but it is
* an acceptable approximation .
* @ param word the word that follows the article .
* @ return the article . */
protected static String getAorAnArticle ( String word ) { } } | if ( Arrays . asList ( 'a' , 'e' , 'i' , 'o' , 'u' , 'y' ) . contains ( Character . toLowerCase ( word . charAt ( 0 ) ) ) ) { return "an" ; // $ NON - NLS - 1 $
} return "a" ; // $ NON - NLS - 1 $ |
/**
 * If there is a transaction in progress, then create a new JMSContext, add it to the
 * transaction registry and return it; subsequent calls in the same transaction reuse it.
 * If there is no transaction, then create (once) and return a plain JMSContext.
 */
private synchronized JMSContext getInternalJMSContext() {
    TransactionSynchronizationRegistry tranSyncRegistry = null;
    try {
        boolean tranIsActive = false;
        // Look up the JTA registry to discover whether a transaction is active.
        tranSyncRegistry = (TransactionSynchronizationRegistry) new InitialContext().lookup(TSR_LOOKUP_NAME);
        if (tranSyncRegistry != null) {
            tranIsActive = (tranSyncRegistry.getTransactionStatus() == Status.STATUS_ACTIVE);
        }
        if (tranIsActive) {
            // Reuse a context already bound to this transaction for the same configuration.
            Object resource = tranSyncRegistry.getResource(jmsContextInfo);
            if (resource != null) {
                return (JMSContext) resource;
            } else {
                final JMSContext transactedContext = createJMSContext(jmsContextInfo, tranIsActive);
                // Once the new JMSContext is created, add it to the transaction registry to
                // retrieve it later if the new JMSContext is requested for the same configuration
                // but in same transactional context
                tranSyncRegistry.putResource(jmsContextInfo, transactedContext);
                // Register a Synchronization object in transaction registry for a call back
                // when the transaction is complete, we will close the JMSContext once the
                // Transaction is complete
                tranSyncRegistry.registerInterposedSynchronization(new Synchronization() {
                    @Override
                    public void beforeCompletion() {
                    }

                    @Override
                    public synchronized void afterCompletion(int status) {
                        // Close the per-transaction context regardless of commit/rollback status.
                        transactedContext.close();
                        inTransaction = false;
                    }
                });
                return transactedContext;
            }
        } else {
            // Non transacted, create a new JMSContext (lazily, once) and return it
            if (internalJMSContext == null) {
                internalJMSContext = createJMSContext(jmsContextInfo, tranIsActive);
            }
            return internalJMSContext;
        }
    } catch (Exception e) {
        // Wrap JNDI/JMS checked exceptions; callers treat failures as unrecoverable here.
        throw new RuntimeException(e.getLocalizedMessage(), e);
    }
}
public class GrammarAccess { /** * Creates an identifier for a Rule which is a valid Java idetifier and unique within
* the Rule ' s grammar .
* @ param rule the Rule
* @ return the identifier */
public String gaRuleIdentifyer ( final AbstractRule rule ) { } } | final String plainName = RuleNames . getRuleNames ( rule ) . getUniqueRuleName ( rule ) ; return this . toJavaIdentifier ( plainName , true ) ; |
public class DescribeVTLDevicesResult { /** * An array of VTL device objects composed of the Amazon Resource Name ( ARN ) of the VTL devices .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setVTLDevices ( java . util . Collection ) } or { @ link # withVTLDevices ( java . util . Collection ) } if you want to
* override the existing values .
* @ param vTLDevices
* An array of VTL device objects composed of the Amazon Resource Name ( ARN ) of the VTL devices .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeVTLDevicesResult withVTLDevices ( VTLDevice ... vTLDevices ) { } } | if ( this . vTLDevices == null ) { setVTLDevices ( new com . amazonaws . internal . SdkInternalList < VTLDevice > ( vTLDevices . length ) ) ; } for ( VTLDevice ele : vTLDevices ) { this . vTLDevices . add ( ele ) ; } return this ; |
public class HBaseUtils { /** * Add a Collection of Columns to an Operation , Only Add Single Columns
* If Their Family Isn ' t Already Being Added .
* @ param columns
* Collection of columns to add to the operation
* @ param operation
* The HBase operation to add the columns to */
private static void addColumnsToOperation ( Collection < String > columns , Operation operation ) { } } | // Keep track of whole family additions
Set < String > familySet = new HashSet < String > ( ) ; // Iterate through each of the required columns
for ( String column : columns ) { // Split the column by : ( family : column )
String [ ] familyAndColumn = column . split ( ":" ) ; // Check if this is a family only
if ( familyAndColumn . length == 1 ) { // Add family to whole family additions , and add to scanner
familySet . add ( familyAndColumn [ 0 ] ) ; operation . addFamily ( Bytes . toBytes ( familyAndColumn [ 0 ] ) ) ; } else { // Add this column , as long as it ' s entire family wasn ' t added .
if ( ! familySet . contains ( familyAndColumn [ 0 ] ) ) { operation . addColumn ( Bytes . toBytes ( familyAndColumn [ 0 ] ) , Bytes . toBytes ( familyAndColumn [ 1 ] ) ) ; } } } |
public class ActivityChooserModel { /** * Sets the sorter for ordering activities based on historical data and an intent .
* @ param activitySorter The sorter .
* @ see ActivitySorter */
public void setActivitySorter ( ActivitySorter activitySorter ) { } } | synchronized ( mInstanceLock ) { if ( mActivitySorter == activitySorter ) { return ; } mActivitySorter = activitySorter ; if ( sortActivitiesIfNeeded ( ) ) { notifyChanged ( ) ; } } |
public class ArtifactsMojo { /** * Build the list of files from which digests should be generated .
* < p > The list is composed of the project main and attached artifacts . < / p >
* @ return the list of files that should be processed .
* @ see # hasValidFile ( org . apache . maven . artifact . Artifact ) */
@ Override protected List < ChecksumFile > getFilesToProcess ( ) { } } | List < ChecksumFile > files = new LinkedList < ChecksumFile > ( ) ; // Add project main artifact .
if ( hasValidFile ( project . getArtifact ( ) ) ) { files . add ( new ChecksumFile ( "" , project . getArtifact ( ) . getFile ( ) , project . getArtifact ( ) . getType ( ) , null ) ) ; } // Add projects attached .
if ( project . getAttachedArtifacts ( ) != null ) { for ( Artifact artifact : ( List < Artifact > ) project . getAttachedArtifacts ( ) ) { if ( hasValidFile ( artifact ) ) { files . add ( new ChecksumFile ( "" , artifact . getFile ( ) , artifact . getType ( ) , artifact . getClassifier ( ) ) ) ; } } } return files ; |
public class InjectionService { /** * Returns the received { @ code name } , { @ code id } and { @ link Gender } to the sender as a JSON list . */
@ Get ( "/param/{name}/{id}" ) public HttpResponse param ( @ Param String name , /* from path variable */
@ Param int id , /* from path variable and converted into integer */
@ Param Gender gender /* from query string and converted into enum */
) throws JsonProcessingException { } } | return HttpResponse . of ( HttpStatus . OK , MediaType . JSON_UTF_8 , mapper . writeValueAsBytes ( Arrays . asList ( name , id , gender ) ) ) ; |
public class AmazonConfigClient { /** * Returns a list of all pending aggregation requests .
* @ param describePendingAggregationRequestsRequest
* @ return Result of the DescribePendingAggregationRequests operation returned by the service .
* @ throws InvalidParameterValueException
* One or more of the specified parameters are invalid . Verify that your parameters are valid and try again .
* @ throws InvalidNextTokenException
* The specified next token is invalid . Specify the < code > nextToken < / code > string that was returned in the
* previous response to get the next page of results .
* @ throws InvalidLimitException
* The specified limit is outside the allowable range .
* @ sample AmazonConfig . DescribePendingAggregationRequests
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / config - 2014-11-12 / DescribePendingAggregationRequests "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public DescribePendingAggregationRequestsResult describePendingAggregationRequests ( DescribePendingAggregationRequestsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribePendingAggregationRequests ( request ) ; |
public class InetAddressPredicates { /** * Returns a { @ link Predicate } which returns { @ code true } if the given { @ link InetAddress } is in the
* range of a < a href = " https : / / tools . ietf . org / html / rfc4632 " > Classless Inter - domain Routing ( CIDR ) < / a > block .
* @ param baseAddress the base { @ link InetAddress } of a CIDR notation
* @ param maskBits the number of significant bits which describes its network portion */
public static Predicate < InetAddress > ofCidr ( InetAddress baseAddress , int maskBits ) { } } | requireNonNull ( baseAddress , "baseAddress" ) ; checkArgument ( maskBits >= 0 , "maskBits: %s (expected: >= 0)" , maskBits ) ; return ofCidr ( baseAddress , maskBits , maskBits ) ; |
/**
 * refactored from Oauth SendErrorJson. Only usable for sending an http400.
 *
 * <p>Writes an OAuth-style JSON error body ({@code error}/{@code error_description})
 * when an error code is given; otherwise sends a plain HTTP error status.
 */
private void sendErrorJSON(HttpServletResponse response, int statusCode, String errorCode, String errorDescription) {
    final String error = "error";
    final String error_description = "error_description";
    try {
        if (errorCode != null) {
            // Build and write the JSON error payload.
            response.setStatus(statusCode);
            response.setHeader(ClientConstants.REQ_CONTENT_TYPE_NAME, "application/json;charset=UTF-8");
            JSONObject responseJSON = new JSONObject();
            responseJSON.put(error, errorCode);
            if (errorDescription != null) {
                responseJSON.put(error_description, errorDescription);
            }
            PrintWriter pw;
            pw = response.getWriter();
            pw.write(responseJSON.toString());
            pw.flush();
        } else {
            // No error code supplied: fall back to the container's default error page.
            response.sendError(statusCode);
        }
    } catch (IOException e) {
        if (tc.isDebugEnabled())
            Tr.debug(tc, "Internal error sending error message", e);
        try {
            // Best effort: report a 500 if even the error response failed.
            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        } catch (IOException ioe) {
            if (tc.isDebugEnabled())
                Tr.debug(tc, "yet another internal error, give up", ioe);
        }
    }
}
public class AtomPlacer3D { /** * Count and find first heavy atom ( s ) ( non Hydrogens ) in a chain .
* @ param molecule the reference molecule for searching the chain
* @ param chain chain to be searched
* @ return the atom number of the first heavy atom the number of heavy atoms in the chain */
public int [ ] findHeavyAtomsInChain ( IAtomContainer molecule , IAtomContainer chain ) { } } | int [ ] heavy = { - 1 , - 1 } ; int hc = 0 ; for ( int i = 0 ; i < chain . getAtomCount ( ) ; i ++ ) { if ( isHeavyAtom ( chain . getAtom ( i ) ) ) { if ( heavy [ 0 ] < 0 ) { heavy [ 0 ] = molecule . indexOf ( chain . getAtom ( i ) ) ; } hc ++ ; } } heavy [ 1 ] = hc ; return heavy ; |
public class sslservice_sslcertkey_binding { /** * Use this API to fetch sslservice _ sslcertkey _ binding resources of given name . */
public static sslservice_sslcertkey_binding [ ] get ( nitro_service service , String servicename ) throws Exception { } } | sslservice_sslcertkey_binding obj = new sslservice_sslcertkey_binding ( ) ; obj . set_servicename ( servicename ) ; sslservice_sslcertkey_binding response [ ] = ( sslservice_sslcertkey_binding [ ] ) obj . get_resources ( service ) ; return response ; |
public class ChainImpl { /** * { @ inheritDoc } */
@ Override public void addGroup ( Group group ) { } } | group . setChain ( this ) ; // Set the altlocs chain as well
for ( Group g : group . getAltLocs ( ) ) { g . setChain ( this ) ; } groups . add ( group ) ; // store the position internally for quick access of this group
String pdbResnum = null ; ResidueNumber resNum = group . getResidueNumber ( ) ; if ( resNum != null ) pdbResnum = resNum . toString ( ) ; if ( pdbResnum != null ) { Integer pos = groups . size ( ) - 1 ; // ARGH sometimes numbering in PDB files is confusing .
// e . g . PDB : 1sfe
/* * ATOM 620 N GLY 93 - 24.320 - 6.591 4.210 1.00 46.82 N
* ATOM 621 CA GLY 93 - 24.960 - 6.849 5.497 1.00 47.35 C
* ATOM 622 C GLY 93 - 26.076 - 5.873 5.804 1.00 47.24 C
* ATOM 623 O GLY 93 - 26.382 - 4.986 5.006 1.00 47.56 O
* and . . .
* HETATM 1348 O HOH 92 - 21.853 - 16.886 19.138 1.00 66.92 O
* HETATM 1349 O HOH 93 - 26.126 1.226 29.069 1.00 71.69 O
* HETATM 1350 O HOH 94 - 22.250 - 18.060 - 6.401 1.00 61.97 O */
// this check is to give in this case the entry priority that is an AminoAcid / comes first . . .
// a good example of same residue number for 2 residues is 3th3 , chain T , residue 201 ( a LYS and a sugar BGC covalently attached to it ) - JD 2016-03-09
if ( pdbResnumMap . containsKey ( pdbResnum ) ) { logger . warn ( "Adding residue {}({}) to chain {} but a residue with same residue number is already present: {}({}). Will add only the aminoacid residue (if any) to the lookup, lookups for that residue number won't work properly." , pdbResnum , group . getPDBName ( ) , getChainID ( ) , groups . get ( pdbResnumMap . get ( pdbResnum ) ) . getResidueNumber ( ) , groups . get ( pdbResnumMap . get ( pdbResnum ) ) . getPDBName ( ) ) ; if ( group instanceof AminoAcid ) pdbResnumMap . put ( pdbResnum , pos ) ; } else pdbResnumMap . put ( pdbResnum , pos ) ; } |
public class Streams { /** * Attempt to flush and close an array of < tt > OutputStream < / tt > s .
* @ param streams < tt > OutputStream < / tt > s to attempt to flush and close .
* @ return < tt > True < / tt > if all streams were flushed and closed , or
* < tt > false < / tt > if an exception was thrown . */
public static boolean fclose ( final OutputStream [ ] streams ) { } } | boolean success = true ; for ( OutputStream stream : streams ) { boolean rv = fclose ( stream ) ; if ( ! rv ) success = false ; } return success ; |
public class OptionIOSetting { /** * Sets the setting for a certain question . It will throw
* a CDKException when the setting is not valid . The first setting is
* setting 1. */
public void setSetting ( int setting ) throws CDKException { } } | if ( setting < settings . size ( ) + 1 && setting > 0 ) { this . setting = ( String ) settings . get ( setting - 1 ) ; } else { throw new CDKException ( "Setting " + setting + " does not exist." ) ; } |
public class OrderAnalyzer { /** * { @ inheritDoc } */
@ Override public OrderAnalyzerResult analyze ( ) { } } | setCurrentDate ( null ) ; setSeenEvent ( null ) ; final PersonAnalysisVisitor visitor = new PersonAnalysisVisitor ( ) ; person . accept ( visitor ) ; for ( final Attribute attribute : visitor . getTrimmedAttributes ( ) ) { basicOrderCheck ( attribute ) ; if ( isUnorderedEvent ( attribute ) ) { // We DO NOT pay attention to this event when
// doing logical order analysis .
continue ; } birthCheck ( attribute ) ; deathCheck ( attribute ) ; setSeenEvent ( attribute ) ; } familyOrderAnalyzer . analyze ( ) ; childrenOrderAnalyzer . analyze ( ) ; return getResult ( ) ; |
public class Swagger2MarkupConfigBuilder { /** * Specifies if the paths should be grouped by tags or stay as - is .
* @ param pathsGroupedBy the GroupBy enum
* @ return this builder */
public Swagger2MarkupConfigBuilder withPathsGroupedBy ( GroupBy pathsGroupedBy ) { } } | Validate . notNull ( pathsGroupedBy , "%s must not be null" , "pathsGroupedBy" ) ; config . pathsGroupedBy = pathsGroupedBy ; return this ; |
public class ImageMemoryCache { /** * Generates key for memory cache entry
* Format for memory cache key is [ imageUri ] _ [ width ] : [ height ] . */
public static String generateMemoryCacheEntryKey ( String imageUri , int width , int height ) { } } | return new StringBuilder ( imageUri ) . append ( URI_DIMENTION_SEPARATOR ) . append ( width ) . append ( WIDTH_HEIGHT_SEPARATOR ) . append ( height ) . toString ( ) ; |
public class WTreeRenderer { /** * Iterate of over the rows to render the tree items .
* @ param tree the WTree to render
* @ param mode the expand mode
* @ param model the tree model
* @ param rowIndex the current row index
* @ param xml the XML string builder
* @ param selectedRows the set of selected rows
* @ param expandedRows the set of expanded rows */
protected void paintItem ( final WTree tree , final WTree . ExpandMode mode , final TreeItemModel model , final List < Integer > rowIndex , final XmlStringBuilder xml , final Set < String > selectedRows , final Set < String > expandedRows ) { } } | String itemId = model . getItemId ( rowIndex ) ; boolean selected = selectedRows . remove ( itemId ) ; boolean expandable = model . isExpandable ( rowIndex ) && model . hasChildren ( rowIndex ) ; boolean expanded = expandedRows . remove ( itemId ) ; TreeItemImage image = model . getItemImage ( rowIndex ) ; String url = null ; if ( image != null ) { url = tree . getItemImageUrl ( image , itemId ) ; } xml . appendTagOpen ( "ui:treeitem" ) ; xml . appendAttribute ( "id" , tree . getItemIdPrefix ( ) + itemId ) ; xml . appendAttribute ( "label" , model . getItemLabel ( rowIndex ) ) ; xml . appendOptionalUrlAttribute ( "imageUrl" , url ) ; xml . appendOptionalAttribute ( "selected" , selected , "true" ) ; xml . appendOptionalAttribute ( "expandable" , expandable , "true" ) ; xml . appendOptionalAttribute ( "open" , expandable && expanded , "true" ) ; xml . appendClose ( ) ; if ( expandable && ( mode == WTree . ExpandMode . CLIENT || expanded ) ) { // Get actual child count
int children = model . getChildCount ( rowIndex ) ; if ( children > 0 ) { for ( int i = 0 ; i < children ; i ++ ) { // Add next level
List < Integer > nextRow = new ArrayList < > ( rowIndex ) ; nextRow . add ( i ) ; paintItem ( tree , mode , model , nextRow , xml , selectedRows , expandedRows ) ; } } } xml . appendEndTag ( "ui:treeitem" ) ; |
public class ControlPoint { /** * Queues a task to run when the request controller allows it . This allows tasks not to be dropped when the max request
* limit has been hit . If the container has been suspended then this
* Note that the task will be run withing the context of a { @ link # beginRequest ( ) } call , if the task
* is executed there is no need to invoke on the control point again .
* @ param task The task to run
* @ param taskExecutor The executor to run the task in */
public void forceQueueTask ( Runnable task , Executor taskExecutor ) { } } | controller . queueTask ( this , task , taskExecutor , - 1 , null , false , true ) ; |
public class SIMPUtils { /** * Set up guaranteed delivery message properties . These are compulsory properties
* on a control message and are therefore set throughout the code . The method
* makes it easier to cope with new properties in the message .
* @ param msg ControlMessage on which to set properties . */
public static void setGuaranteedDeliveryProperties ( ControlMessage msg , SIBUuid8 sourceMEUuid , SIBUuid8 targetMEUuid , SIBUuid12 streamId , SIBUuid12 gatheringTargetDestUuid , SIBUuid12 targetDestUuid , ProtocolType protocolType , byte protocolVersion ) { } } | // Remote to local message properties
msg . setGuaranteedSourceMessagingEngineUUID ( sourceMEUuid ) ; msg . setGuaranteedTargetMessagingEngineUUID ( targetMEUuid ) ; msg . setGuaranteedStreamUUID ( streamId ) ; msg . setGuaranteedGatheringTargetUUID ( gatheringTargetDestUuid ) ; msg . setGuaranteedTargetDestinationDefinitionUUID ( targetDestUuid ) ; if ( protocolType != null ) msg . setGuaranteedProtocolType ( protocolType ) ; msg . setGuaranteedProtocolVersion ( protocolVersion ) ; |
public class EvolvingImagesWorker { /** * Starts the evolution worker with the given evolution result callback . The
* callback may be null .
* @ param callback the { @ code EvolutionResult } callback . The first parameter
* contains the current result and the second the best . */
public void start ( final BiConsumer < EvolutionResult < PolygonGene , Double > , EvolutionResult < PolygonGene , Double > > callback ) { } } | final Thread thread = new Thread ( ( ) -> { final MinMax < EvolutionResult < PolygonGene , Double > > best = MinMax . of ( ) ; _engine . stream ( ) . limit ( result -> ! Thread . currentThread ( ) . isInterrupted ( ) ) . peek ( best ) . forEach ( r -> { waiting ( ) ; if ( callback != null ) { callback . accept ( r , best . getMax ( ) ) ; } } ) ; } ) ; thread . start ( ) ; _thread = thread ; |
public class PortletAdministrationHelper { /** * Persist a new or edited PortletDefinition from a form , replacing existing values .
* @ param publisher { @ code IPerson } that requires permission to save this definition
* @ param form form data to persist
* @ return new { @ code PortletDefinitionForm } for this portlet ID */
public PortletDefinitionForm savePortletRegistration ( IPerson publisher , PortletDefinitionForm form ) throws Exception { } } | logger . trace ( "In savePortletRegistration() - for: {}" , form . getPortletName ( ) ) ; /* TODO : Service - Layer Security Reboot ( great need of refactoring with a community - approved plan in place ) */
// User must have the selected lifecycle permission over AT LEAST ONE
// category in which this portlet resides . ( This is the same check that
// is made when the user enters the lifecycle - selection step in the wizard . )
if ( ! hasLifecyclePermission ( publisher , form . getLifecycleState ( ) , form . getCategories ( ) ) ) { logger . warn ( "User '" + publisher . getUserName ( ) + "' attempted to save the following portlet without the selected MANAGE permission: " + form ) ; throw new SecurityException ( "Not Authorized" ) ; } if ( ! form . isNew ( ) ) { // User must have the previous lifecycle permission
// in AT LEAST ONE previous category as well
IPortletDefinition def = this . portletDefinitionRegistry . getPortletDefinition ( form . getId ( ) ) ; Set < PortletCategory > categories = portletCategoryRegistry . getParentCategories ( def ) ; SortedSet < JsonEntityBean > categoryBeans = new TreeSet < > ( ) ; for ( PortletCategory cat : categories ) { categoryBeans . add ( new JsonEntityBean ( cat ) ) ; } if ( ! hasLifecyclePermission ( publisher , def . getLifecycleState ( ) , categoryBeans ) ) { logger . warn ( "User '" + publisher . getUserName ( ) + "' attempted to save the following portlet without the previous MANAGE permission: " + form ) ; throw new SecurityException ( "Not Authorized" ) ; } } if ( form . isNew ( ) || portletDefinitionRegistry . getPortletDefinition ( form . getId ( ) ) . getType ( ) . getId ( ) != form . getTypeId ( ) ) { // User must have access to the selected CPD if s / he selected it in this interaction
final int selectedTypeId = form . getTypeId ( ) ; final PortletPublishingDefinition cpd = portletPublishingDefinitionDao . getChannelPublishingDefinition ( selectedTypeId ) ; final Map < IPortletType , PortletPublishingDefinition > allowableCpds = this . getAllowableChannelPublishingDefinitions ( publisher ) ; if ( ! allowableCpds . containsValue ( cpd ) ) { logger . warn ( "User '" + publisher . getUserName ( ) + "' attempted to administer the following portlet without the selected " + IPermission . PORTLET_MANAGER_SELECT_PORTLET_TYPE + " permission: " + form ) ; throw new SecurityException ( "Not Authorized" ) ; } } // create the principal array from the form ' s principal list - - only principals with
// permissions
final Set < IGroupMember > subscribePrincipalSet = new HashSet < > ( form . getPrincipals ( ) . size ( ) ) ; final Set < IGroupMember > browsePrincipalSet = new HashSet < > ( form . getPrincipals ( ) . size ( ) ) ; final Set < IGroupMember > configurePrincipalSet = new HashSet < > ( form . getPrincipals ( ) . size ( ) ) ; for ( JsonEntityBean bean : form . getPrincipals ( ) ) { final String subscribePerm = bean . getTypeAndIdHash ( ) + "_" + IPermission . PORTLET_SUBSCRIBER_ACTIVITY ; final String browsePerm = bean . getTypeAndIdHash ( ) + "_" + IPermission . PORTLET_BROWSE_ACTIVITY ; final String configurePerm = bean . getTypeAndIdHash ( ) + "_" + IPermission . PORTLET_MODE_CONFIG ; final EntityEnum entityEnum = bean . getEntityType ( ) ; final IGroupMember principal = entityEnum . isGroup ( ) ? ( GroupService . findGroup ( bean . getId ( ) ) ) : ( GroupService . getGroupMember ( bean . getId ( ) , entityEnum . getClazz ( ) ) ) ; if ( form . getPermissions ( ) . contains ( subscribePerm ) ) { logger . info ( "In savePortletRegistration() - Found a subscribePerm for principal: {}" , principal ) ; subscribePrincipalSet . add ( principal ) ; } if ( form . getPermissions ( ) . contains ( browsePerm ) ) { logger . info ( "In savePortletRegistration() - Found a browsePerm for principal: {}" , principal ) ; browsePrincipalSet . add ( principal ) ; } if ( form . getPermissions ( ) . contains ( configurePerm ) ) { logger . info ( "In savePortletRegistration() - Found a configurePerm for principal: {}" , principal ) ; configurePrincipalSet . add ( principal ) ; } } // create the category list from the form ' s category bean list
List < PortletCategory > categories = new ArrayList < > ( ) ; for ( JsonEntityBean category : form . getCategories ( ) ) { String id = category . getId ( ) ; String iCatID = id . startsWith ( "cat" ) ? id . substring ( 3 ) : id ; categories . add ( portletCategoryRegistry . getPortletCategory ( iCatID ) ) ; } final IPortletType portletType = portletTypeRegistry . getPortletType ( form . getTypeId ( ) ) ; if ( portletType == null ) { throw new IllegalArgumentException ( "No IPortletType exists for ID " + form . getTypeId ( ) ) ; } IPortletDefinition portletDef ; if ( form . getId ( ) == null ) { portletDef = new PortletDefinitionImpl ( portletType , form . getFname ( ) , form . getName ( ) , form . getTitle ( ) , form . getApplicationId ( ) , form . getPortletName ( ) , form . isFramework ( ) ) ; } else { portletDef = portletDefinitionRegistry . getPortletDefinition ( form . getId ( ) ) ; portletDef . setType ( portletType ) ; portletDef . setFName ( form . getFname ( ) ) ; portletDef . setName ( form . getName ( ) ) ; portletDef . setTitle ( form . getTitle ( ) ) ; portletDef . getPortletDescriptorKey ( ) . setWebAppName ( form . getApplicationId ( ) ) ; portletDef . getPortletDescriptorKey ( ) . setPortletName ( form . getPortletName ( ) ) ; portletDef . getPortletDescriptorKey ( ) . setFrameworkPortlet ( form . isFramework ( ) ) ; } portletDef . setDescription ( form . getDescription ( ) ) ; portletDef . setTimeout ( form . getTimeout ( ) ) ; // Make parameters ( NB : these are different from preferences ) in the
// portletDef reflect the state of the form , in case any have changed .
for ( String key : form . getParameters ( ) . keySet ( ) ) { String value = form . getParameters ( ) . get ( key ) . getValue ( ) ; if ( ! StringUtils . isBlank ( value ) ) { portletDef . addParameter ( key , value ) ; } } portletDef . addParameter ( IPortletDefinition . EDITABLE_PARAM , Boolean . toString ( form . isEditable ( ) ) ) ; portletDef . addParameter ( IPortletDefinition . CONFIGURABLE_PARAM , Boolean . toString ( form . isConfigurable ( ) ) ) ; portletDef . addParameter ( IPortletDefinition . HAS_HELP_PARAM , Boolean . toString ( form . isHasHelp ( ) ) ) ; portletDef . addParameter ( IPortletDefinition . HAS_ABOUT_PARAM , Boolean . toString ( form . isHasAbout ( ) ) ) ; // Now add portlet preferences
List < IPortletPreference > preferenceList = new ArrayList < > ( ) ; for ( String key : form . getPortletPreferences ( ) . keySet ( ) ) { List < String > prefValues = form . getPortletPreferences ( ) . get ( key ) . getValue ( ) ; if ( prefValues != null && prefValues . size ( ) > 0 ) { String [ ] values = prefValues . toArray ( new String [ prefValues . size ( ) ] ) ; BooleanAttribute readOnly = form . getPortletPreferenceReadOnly ( ) . get ( key ) ; preferenceList . add ( new PortletPreferenceImpl ( key , readOnly . getValue ( ) , values ) ) ; } } portletDef . setPortletPreferences ( preferenceList ) ; // Lastly update the PortletDefinition ' s lifecycle state & lifecycle - related metadata
updateLifecycleState ( form , portletDef , publisher ) ; // The final parameter of IGroupMembers is used to set the initial SUBSCRIBE permission set
portletPublishingService . savePortletDefinition ( portletDef , publisher , categories , new ArrayList < > ( subscribePrincipalSet ) ) ; // updatePermissions ( portletDef , subscribePrincipalSet ,
// IPermission . PORTLET _ SUBSCRIBER _ ACTIVITY ) ;
updatePermissions ( portletDef , browsePrincipalSet , IPermission . PORTAL_SUBSCRIBE , IPermission . PORTLET_BROWSE_ACTIVITY ) ; updatePermissions ( portletDef , configurePrincipalSet , IPermission . PORTAL_PUBLISH , IPermission . PORTLET_MODE_CONFIG ) ; return this . createPortletDefinitionForm ( publisher , portletDef . getPortletDefinitionId ( ) . getStringId ( ) ) ; |
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcStructuralLoadPlanarForce ( ) { } } | if ( ifcStructuralLoadPlanarForceEClass == null ) { ifcStructuralLoadPlanarForceEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 646 ) ; } return ifcStructuralLoadPlanarForceEClass ; |
public class GosuEscapeUtil { /** * Escape any special characters in the string , using the Java escape syntax .
* For example any tabs become \ t , newlines become \ n etc .
* @ return the escaped string . Returns the original string unchanged if it
* contains no special characters . */
public static String escapeForJava ( String string ) { } } | String result ; StringBuffer resultBuffer = null ; for ( int i = 0 , length = string . length ( ) ; i < length ; i ++ ) { char ch = string . charAt ( i ) ; String escape = escapeForJava ( ch ) ; if ( escape != null ) { if ( resultBuffer == null ) { resultBuffer = new StringBuffer ( string ) ; resultBuffer . setLength ( i ) ; } resultBuffer . append ( escape ) ; } else if ( resultBuffer != null ) { resultBuffer . append ( ch ) ; } } result = ( resultBuffer != null ) ? resultBuffer . toString ( ) : string ; return result ; |
public class Validation { /** * method to check the monomer ' s validation
* @ param str
* monomer id
* @ param type
* type of monomer
* @ return true if the monomer is valid , false otherwise
* @ throws ChemistryException
* if the Chemistry Engine can not be initialized
* @ throws MonomerLoadingException
* if momomers can not be loaded
* @ throws org . helm . notation2 . parser . exceptionparser . NotationException */
private static boolean isMonomerValid ( String str , String type ) throws ChemistryException , MonomerLoadingException , org . helm . notation2 . parser . exceptionparser . NotationException { } } | LOG . info ( "Is Monomer valid: " + str ) ; MonomerFactory monomerFactory = null ; monomerFactory = MonomerFactory . getInstance ( ) ; /* Search in Database */
MonomerStore monomerStore = monomerFactory . getMonomerStore ( ) ; if ( monomerStore . hasMonomer ( type , str ) ) { LOG . info ( "Monomer is located in the database: " + str ) ; return true ; } else if ( str . charAt ( 0 ) == '[' && str . charAt ( str . length ( ) - 1 ) == ']' && monomerStore . hasMonomer ( type , str . substring ( 1 , str . length ( ) - 1 ) ) ) { LOG . info ( "Monomer is located in the database: " + str ) ; return true ; } /* polymer type is Blob : accept all */
else if ( type . equals ( "BLOB" ) ) { LOG . info ( "Blob's Monomer Type: " + str ) ; return true ; } /* new unknown monomer for peptide */
else if ( type . equals ( "PEPTIDE" ) && str . equals ( "X" ) ) { LOG . info ( "Unknown monomer type for peptide: " + str ) ; return true ; } /* new unknown monomer for peptide */
else if ( type . equals ( "RNA" ) && str . equals ( "N" ) ) { LOG . info ( "Unknown monomer type for rna: " + str ) ; return true ; } /* new unknown types */
else if ( str . equals ( "?" ) || str . equals ( "_" ) ) { LOG . info ( "Unknown types: " + str ) ; return true ; } /* nucleotide */
else if ( type . equals ( "RNA" ) ) { List < String > elements = NucleotideParser . getMonomerIDListFromNucleotide ( str ) ; for ( String element : elements ) { if ( ! ( monomerStore . hasMonomer ( type , element ) ) ) { /* SMILES Check */
if ( element . startsWith ( "[" ) && element . endsWith ( "]" ) ) { element = element . substring ( 1 , element . length ( ) - 1 ) ; } if ( ! Chemistry . getInstance ( ) . getManipulator ( ) . validateSMILES ( element ) ) { return false ; } } } LOG . info ( "Nucleotide type for RNA: " + str ) ; return true ; } LOG . info ( "SMILES Check" ) ; /* SMILES Check */
if ( str . charAt ( 0 ) == '[' && str . charAt ( str . length ( ) - 1 ) == ']' ) { str = str . substring ( 1 , str . length ( ) - 1 ) ; } return Chemistry . getInstance ( ) . getManipulator ( ) . validateSMILES ( str ) ; |
public class IOManagerAsync { RequestQueue < ReadRequest > getReadRequestQueue ( FileIOChannel . ID channelID ) { } } | return this . readers [ channelID . getThreadNum ( ) ] . requestQueue ; |
public class AbstractMemberWriter { /** * Add the navigation summary link .
* @ param members members to be linked
* @ param visibleMemberMap the visible inherited members map
* @ param liNav the content tree to which the navigation summary link will be added */
protected void addNavSummaryLink ( SortedSet < ? extends Element > members , VisibleMemberMap visibleMemberMap , Content liNav ) { } } | if ( ! members . isEmpty ( ) ) { liNav . addContent ( getNavSummaryLink ( null , true ) ) ; return ; } TypeElement superClass = utils . getSuperClass ( typeElement ) ; while ( superClass != null ) { if ( visibleMemberMap . hasMembers ( superClass ) ) { liNav . addContent ( getNavSummaryLink ( superClass , true ) ) ; return ; } superClass = utils . getSuperClass ( superClass ) ; } liNav . addContent ( getNavSummaryLink ( null , false ) ) ; |
public class DAOGenerator { /** * Curated will correct The ModelDef
* @ param curated
* @ throws Exception */
public void start ( ) throws Exception { } } | if ( cleanupDirectory ) { CleanUp . start ( conf ) ; } IDatabaseDev db = ( IDatabaseDev ) em . getDB ( ) ; // TODO make sure DB platform can be used for development
ModelDefinitionProvider provider = new ModelDefinitionProvider ( db , conf . dbUser , null , conf . includeSchema ) ; ModelDef [ ] origModelList = provider . getTableDefinitions ( ) ; new ModelMetaDataGenerator ( ) . start ( origModelList , conf ) ; // TODO : proper support for explicit ModelDefinitions
ModelDef [ ] withExplecitList = overrideModelDefFromExplicit ( origModelList , explicitMeta ) ; ModelDef [ ] modelList = correctModelList ( withExplecitList ) ; new DAOClassGenerator ( ) . start ( modelList , conf , true , false ) ; new BeanGenerator ( ) . start ( modelList , conf ) ; new MapperGenerator ( ) . start ( modelList , conf ) ; ModelDef [ ] modelListOwned = setModelOwners ( modelList , tableGroups ) ; ModelDef [ ] modelListChilded = setDirectChildren ( modelListOwned ) ; new ColumnNameGenerator ( ) . start ( modelListChilded , conf ) ; new TableNameGenerator ( ) . start ( modelListChilded , conf ) ; new SchemaTableGenerator ( ) . start ( modelListChilded , conf ) ; new TableColumnGenerator ( ) . start ( modelListChilded , conf ) ; // new ModelMetaDataGenerator ( ) . start ( modelListChilded , conf ) ;
new DAOInstanceFactoryGenerator ( ) . start ( modelListChilded , conf ) ; new ModelToDAOConversionGenerator ( ) . start ( modelListChilded , conf ) ; |
public class FloatWindowApi { /** * activity onResume 事件回调
* @ param activity 发生 onResume 事件的activity */
@ Override public void onActivityResume ( Activity activity ) { } } | HMSAgentLog . d ( "autoShowFloatWindow:" + isShowFloatWindowCalled ) ; if ( isShowFloatWindowCalled ) { showFinal ( true , null , ApiClientMgr . INST . getApiClient ( ) ) ; } |
public class MathUtils { /** * / * [ deutsch ]
* < p > Macht einen sicheren TypeCast auf ein int - Primitive . < / p >
* @ param num long - primitive
* @ return int as type - cast
* @ throws ArithmeticException if int - range overflows */
public static int safeCast ( long num ) { } } | if ( num < Integer . MIN_VALUE || num > Integer . MAX_VALUE ) { throw new ArithmeticException ( "Out of range: " + num ) ; } else { return ( int ) num ; } |
public class XmlUtil { /** * Wraps systemId with a " jstl : " prefix to prevent the parser from
* thinking that the URI is truly relative and resolving it against
* the current directory in the filesystem . */
private static String wrapSystemId ( String systemId ) { } } | if ( systemId == null ) { return "jstl:" ; } else if ( UrlUtil . isAbsoluteUrl ( systemId ) ) { return systemId ; } else { return ( "jstl:" + systemId ) ; } |
public class DefaultParserFactory { /** * ( non - Javadoc )
* @ see net . sf . flatpack . PZParserFactory # newFixedWidthParser ( java . sql . Connection ,
* java . io . InputStream , java . lang . String ) */
@ Override public Parser newFixedLengthParser ( final Connection con , final InputStream dataSourceStream , final String dataDefinition ) { } } | return new DBFixedLengthParser ( con , dataSourceStream , dataDefinition ) ; |
public class CSSImportPostProcessor { /** * ( non - Javadoc )
* @ see net . jawr . web . resource . bundle . postprocess .
* AbstractChainedResourceBundlePostProcessor # doPostProcessBundle ( net . jawr .
* web . resource . bundle . postprocess . BundleProcessingStatus ,
* java . lang . StringBuffer ) */
@ Override protected StringBuffer doPostProcessBundle ( BundleProcessingStatus status , StringBuffer bundleData ) throws IOException { } } | String data = bundleData . toString ( ) ; // Rewrite each css url path
Matcher matcher = IMPORT_PATTERN . matcher ( data ) ; StringBuffer sb = new StringBuffer ( ) ; while ( matcher . find ( ) ) { String content = getCssPathContent ( matcher . group ( 3 ) , matcher . group ( 4 ) , status ) ; matcher . appendReplacement ( sb , RegexUtil . adaptReplacementToMatcher ( content ) ) ; } matcher . appendTail ( sb ) ; return sb ; |
public class Instantiators { /** * Creates a converter for { @ code klass } . */
public static < T > Converter < T > createConverter ( Class < T > klass , InstantiatorModule ... modules ) { } } | return createConverterForType ( klass , modules ) ; |
public class ChoiceFormat { /** * Returns pattern with formatted double .
* @ param number number to be formatted & substituted .
* @ param toAppendTo where text is appended .
* @ param status ignore no useful status is returned . */
public StringBuffer format ( double number , StringBuffer toAppendTo , FieldPosition status ) { } } | // find the number
int i ; for ( i = 0 ; i < choiceLimits . length ; ++ i ) { if ( ! ( number >= choiceLimits [ i ] ) ) { // same as number < choiceLimits , except catchs NaN
break ; } } -- i ; if ( i < 0 ) i = 0 ; // return either a formatted number , or a string
return toAppendTo . append ( choiceFormats [ i ] ) ; |
public class GVRVertexBuffer { /** * Retrieves a vertex attribute as an integer array .
* The attribute name must be one of the
* attributes named in the descriptor passed to the constructor .
* @ param attributeName name of the attribute to update
* @ throws IllegalArgumentException if attribute name not in descriptor vertex attribute is not < i > int < / i >
* @ see # setIntVec ( String , IntBuffer )
* @ see # getIntArray ( String ) */
public int [ ] getIntArray ( String attributeName ) { } } | int [ ] array = NativeVertexBuffer . getIntArray ( getNative ( ) , attributeName ) ; if ( array == null ) { throw new IllegalArgumentException ( "Attribute name " + attributeName + " cannot be accessed" ) ; } return array ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.