signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class NginxStubStatusParser { /** * Parse NGINX stub status into StatusData & lt ; NginxMetrics & gt ; . * @ param nginxStatus retrieved stub status . * @ return parsed metrics , put into StatusData . */ public StatusData < NginxMetrics > parse ( String nginxStatus ) { } }
StatusData < NginxMetrics > status = new StatusData < > ( ) ; if ( StringUtils . isEmpty ( nginxStatus ) ) { throw new IllegalArgumentException ( "nginx status is empty!" ) ; } else { Matcher matcher = pattern . matcher ( nginxStatus ) ; if ( matcher . matches ( ) ) { status . put ( NginxMetrics . ACTIVE , Long . parseLong ( matcher . group ( 1 ) ) ) ; Long accepted = Long . parseLong ( matcher . group ( 2 ) ) ; status . put ( NginxMetrics . ACCEPTED , accepted ) ; status . put ( NginxMetrics . ACCEPTEDPERSECOND , accepted ) ; Long handled = Long . parseLong ( matcher . group ( 3 ) ) ; status . put ( NginxMetrics . HANDLED , handled ) ; status . put ( NginxMetrics . HANDLEDPERSECOND , handled ) ; Long dropped = handled - accepted ; status . put ( NginxMetrics . DROPPED , dropped ) ; status . put ( NginxMetrics . DROPPEDPERSECOND , dropped ) ; Long requests = Long . parseLong ( matcher . group ( 4 ) ) ; status . put ( NginxMetrics . REQUESTS , requests ) ; status . put ( NginxMetrics . REQUESTSPERSECOND , requests ) ; status . put ( NginxMetrics . READING , Long . parseLong ( matcher . group ( 5 ) ) ) ; status . put ( NginxMetrics . WRITING , Long . parseLong ( matcher . group ( 6 ) ) ) ; status . put ( NginxMetrics . WAITING , Long . parseLong ( matcher . group ( 7 ) ) ) ; } else { String message = "NginxStubStatusParser can't parse nginx status response: [" + nginxStatus + "]" ; throw new IllegalArgumentException ( message ) ; } } return status ;
public class AbstractDiscretePartitionRefiner { /** * Setup the group and refiner ; it is important to call this method before * calling { @ link # refine } otherwise the refinement process will fail . * @ param group a group ( possibly empty ) of automorphisms * @ param refiner the equitable refiner */ public void setup ( PermutationGroup group , EquitablePartitionRefiner refiner ) { } }
this . bestExist = false ; this . best = null ; this . group = group ; this . equitableRefiner = refiner ;
public class JsonServiceDocumentWriter { /** * Build an embedded json object that will have key - value attributes like * ' name ' and ' url ' ( they are MUST ) , ' title ' and ' kind ' . * @ param jsonGenerator jsonGenerator * @ param entity entitySet or singleton * @ throws IOException */ private void writeObject ( JsonGenerator jsonGenerator , Object entity ) throws IOException { } }
jsonGenerator . writeStartObject ( ) ; writeName ( jsonGenerator , entity ) ; writeKind ( jsonGenerator , entity ) ; writeURL ( jsonGenerator , entity ) ; jsonGenerator . writeEndObject ( ) ;
public class DescribeAnalysisSchemesRequest { /** * The analysis schemes you want to describe . * @ param analysisSchemeNames * The analysis schemes you want to describe . */ public void setAnalysisSchemeNames ( java . util . Collection < String > analysisSchemeNames ) { } }
if ( analysisSchemeNames == null ) { this . analysisSchemeNames = null ; return ; } this . analysisSchemeNames = new com . amazonaws . internal . SdkInternalList < String > ( analysisSchemeNames ) ;
public class ClassNode { /** * Specify the class represented by this ` ClassNode ` implements * an interface specified by the given name * @ param name the name of the interface class * @ return this ` ClassNode ` instance */ public ClassNode addInterface ( String name ) { } }
ClassNode intf = infoBase . node ( name ) ; addInterface ( intf ) ; return this ;
public class DescribeConditionalForwardersRequest { /** * The fully qualified domain names ( FQDN ) of the remote domains for which to get the list of associated conditional * forwarders . If this member is null , all conditional forwarders are returned . * @ return The fully qualified domain names ( FQDN ) of the remote domains for which to get the list of associated * conditional forwarders . If this member is null , all conditional forwarders are returned . */ public java . util . List < String > getRemoteDomainNames ( ) { } }
if ( remoteDomainNames == null ) { remoteDomainNames = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return remoteDomainNames ;
public class TreeHashGenerator { /** * Calculates a hex encoded binary hash using a tree hashing algorithm for * the data in the specified file . * @ param file * The file containing the data to hash . * @ return The hex encoded binary tree hash for the data in the specified * file . * @ throws AmazonClientException * If any problems were encountered reading the data or * computing the hash . */ public static String calculateTreeHash ( File file ) throws AmazonClientException { } }
ResettableInputStream is = null ; try { is = new ResettableInputStream ( file ) ; return calculateTreeHash ( is ) ; } catch ( IOException e ) { throw new AmazonClientException ( "Unable to compute hash for file: " + file . getAbsolutePath ( ) , e ) ; } finally { if ( is != null ) is . release ( ) ; }
public class CryptoFileSystemProvider { /** * Typesafe alternative to { @ link FileSystems # newFileSystem ( URI , Map ) } . Default way to retrieve a CryptoFS instance . * @ param pathToVault Path to this vault ' s storage location * @ param properties Parameters used during initialization of the file system * @ return a new file system * @ throws FileSystemNeedsMigrationException if the vault format needs to get updated and < code > properties < / code > did not contain a flag for implicit migration . * @ throws IOException if an I / O error occurs creating the file system */ public static CryptoFileSystem newFileSystem ( Path pathToVault , CryptoFileSystemProperties properties ) throws FileSystemNeedsMigrationException , IOException { } }
URI uri = CryptoFileSystemUri . create ( pathToVault . toAbsolutePath ( ) ) ; return ( CryptoFileSystem ) FileSystems . newFileSystem ( uri , properties ) ;
public class CommonOps_ZDRM { /** * Extracts the diagonal elements ' src ' write it to the ' dst ' vector . ' dst ' * can either be a row or column vector . * @ param src Matrix whose diagonal elements are being extracted . Not modified . * @ param dst A vector the results will be written into . Modified . */ public static void extractDiag ( ZMatrixRMaj src , ZMatrixRMaj dst ) { } }
int N = Math . min ( src . numRows , src . numCols ) ; // reshape if it ' s not the right size if ( ! MatrixFeatures_ZDRM . isVector ( dst ) || dst . numCols * dst . numCols != N ) { dst . reshape ( N , 1 ) ; } for ( int i = 0 ; i < N ; i ++ ) { int index = src . getIndex ( i , i ) ; dst . data [ i * 2 ] = src . data [ index ] ; dst . data [ i * 2 + 1 ] = src . data [ index + 1 ] ; }
public class ClassUtils { /** * Extracts the package name from the given class object */ public static String getPackage ( Class < ? > cls ) { } }
// cls . getPackage ( ) sometimes returns null , in which case fall back to string massaging . java . lang . Package pkg = cls . isArray ( ) ? cls . getComponentType ( ) . getPackage ( ) : cls . getPackage ( ) ; if ( pkg == null ) { int dotPos ; int dolPos = cls . getName ( ) . indexOf ( '$' ) ; if ( dolPos > 0 ) { // we have nested classes , so adjust dotpos to before first $ dotPos = cls . getName ( ) . substring ( 0 , dolPos ) . lastIndexOf ( '.' ) ; } else { dotPos = cls . getName ( ) . lastIndexOf ( '.' ) ; } if ( dotPos > 0 ) { return cls . getName ( ) . substring ( 0 , dotPos ) ; } else { // must be default package . return "" ; } } else { return pkg . getName ( ) ; }
public class TreeWitnessRewriter { /** * rewrites a given connected CQ with the rules put into output */ private Collection < CQIE > rewriteCC ( QueryConnectedComponent cc , Function headAtom , Multimap < Predicate , CQIE > edgeDP ) { } }
List < CQIE > outputRules = new LinkedList < > ( ) ; TreeWitnessSet tws = TreeWitnessSet . getTreeWitnesses ( cc , reasoner , generators , immutabilityTools ) ; if ( cc . hasNoFreeTerms ( ) ) { if ( ! cc . isDegenerate ( ) || cc . getLoop ( ) != null ) for ( Function a : getAtomsForGenerators ( tws . getGeneratorsOfDetachedCC ( ) , getFreshVariable ( ) ) ) { outputRules . add ( datalogFactory . getCQIE ( headAtom , a ) ) ; } } // COMPUTE AND STORE TREE WITNESS FORMULAS for ( TreeWitness tw : tws . getTWs ( ) ) { log . debug ( "TREE WITNESS: {}" , tw ) ; List < Function > twf = new LinkedList < > ( ) ; // equality atoms Iterator < Term > i = tw . getRoots ( ) . iterator ( ) ; Term r0 = i . next ( ) ; while ( i . hasNext ( ) ) twf . add ( termFactory . getFunctionEQ ( i . next ( ) , r0 ) ) ; // root atoms for ( Function a : tw . getRootAtoms ( ) ) { if ( ! ( a . getFunctionSymbol ( ) instanceof TriplePredicate ) ) throw new MinorOntopInternalBugException ( "A triple atom was expected: " + a ) ; boolean isClass = ( ( TriplePredicate ) a . getFunctionSymbol ( ) ) . getClassIRI ( a . getTerms ( ) . stream ( ) . map ( immutabilityTools :: convertIntoImmutableTerm ) . collect ( ImmutableCollectors . toList ( ) ) ) . isPresent ( ) ; twf . add ( isClass ? atomFactory . getMutableTripleAtom ( r0 , a . getTerm ( 1 ) , a . getTerm ( 2 ) ) : atomFactory . getMutableTripleAtom ( r0 , a . getTerm ( 1 ) , r0 ) ) ; } List < Function > genAtoms = getAtomsForGenerators ( tw . getGenerators ( ) , r0 ) ; boolean subsumes = false ; // for ( Function a : genAtoms ) // if ( twf . subsumes ( a ) ) { // subsumes = true ; // log . debug ( " TWF { } SUBSUMES { } " , twf . getAllAtoms ( ) , a ) ; // break ; List < List < Function > > twfs = new ArrayList < > ( subsumes ? 1 : genAtoms . size ( ) ) ; // if ( ! subsumes ) { for ( Function a : genAtoms ) { LinkedList < Function > twfa = new LinkedList < > ( twf ) ; twfa . add ( a ) ; twfs . add ( twfa ) ; } // else // twfs . add ( twf . 
getAllAtoms ( ) ) ; tw . setFormula ( twfs ) ; } final String headURI = headAtom . getFunctionSymbol ( ) . getName ( ) ; if ( ! cc . isDegenerate ( ) ) { if ( tws . hasConflicts ( ) ) { // there are conflicting tree witnesses // use compact exponential rewriting by enumerating all compatible subsets of tree witnesses CompatibleTreeWitnessSetIterator iterator = tws . getIterator ( ) ; while ( iterator . hasNext ( ) ) { Collection < TreeWitness > compatibleTWs = iterator . next ( ) ; log . debug ( "COMPATIBLE: {}" , compatibleTWs ) ; LinkedList < Function > mainbody = new LinkedList < Function > ( ) ; for ( Edge edge : cc . getEdges ( ) ) { boolean contained = false ; for ( TreeWitness tw : compatibleTWs ) if ( tw . getDomain ( ) . contains ( edge . getTerm0 ( ) ) && tw . getDomain ( ) . contains ( edge . getTerm1 ( ) ) ) { contained = true ; log . debug ( "EDGE {} COVERED BY {}" , edge , tw ) ; break ; } if ( ! contained ) { log . debug ( "EDGE {} NOT COVERED BY ANY TW" , edge ) ; mainbody . addAll ( edge . getAtoms ( ) ) ; } } for ( TreeWitness tw : compatibleTWs ) { Function twAtom = getHeadAtom ( headURI , "_TW_" + ( edgeDP . size ( ) + 1 ) , cc . getVariables ( ) ) ; mainbody . add ( twAtom ) ; for ( List < Function > twfa : tw . getFormula ( ) ) edgeDP . put ( twAtom . getFunctionSymbol ( ) , datalogFactory . getCQIE ( twAtom , twfa ) ) ; } mainbody . addAll ( cc . getNonDLAtoms ( ) ) ; outputRules . add ( datalogFactory . getCQIE ( headAtom , mainbody ) ) ; } } else { // no conflicting tree witnesses // use polynomial tree witness rewriting by treating each edge independently LinkedList < Function > mainbody = new LinkedList < > ( ) ; for ( Edge edge : cc . getEdges ( ) ) { log . debug ( "EDGE {}" , edge ) ; Function edgeAtom = null ; for ( TreeWitness tw : tws . getTWs ( ) ) if ( tw . getDomain ( ) . contains ( edge . getTerm0 ( ) ) && tw . getDomain ( ) . contains ( edge . getTerm1 ( ) ) ) { if ( edgeAtom == null ) { // IRI atomURI = edge . getBAtoms ( ) . 
iterator ( ) . next ( ) . getIRI ( ) . getName ( ) ; edgeAtom = getHeadAtom ( headURI , "_EDGE_" + ( edgeDP . size ( ) + 1 ) /* + " _ " + atomURI . getRawFragment ( ) */ , cc . getVariables ( ) ) ; mainbody . add ( edgeAtom ) ; LinkedList < Function > edgeAtoms = new LinkedList < > ( ) ; edgeAtoms . addAll ( edge . getAtoms ( ) ) ; edgeDP . put ( edgeAtom . getFunctionSymbol ( ) , datalogFactory . getCQIE ( edgeAtom , edgeAtoms ) ) ; } for ( List < Function > twfa : tw . getFormula ( ) ) edgeDP . put ( edgeAtom . getFunctionSymbol ( ) , datalogFactory . getCQIE ( edgeAtom , twfa ) ) ; } if ( edgeAtom == null ) // no tree witnesses - - direct insertion into the main body mainbody . addAll ( edge . getAtoms ( ) ) ; } mainbody . addAll ( cc . getNonDLAtoms ( ) ) ; outputRules . add ( datalogFactory . getCQIE ( headAtom , mainbody ) ) ; } } else { // degenerate connected component LinkedList < Function > loopbody = new LinkedList < > ( ) ; Loop loop = cc . getLoop ( ) ; log . debug ( "LOOP {}" , loop ) ; if ( loop != null ) loopbody . addAll ( loop . getAtoms ( ) ) ; loopbody . addAll ( cc . getNonDLAtoms ( ) ) ; outputRules . add ( datalogFactory . getCQIE ( headAtom , loopbody ) ) ; } return outputRules ;
public class XcodeProjectWriter { /** * Create PBXProject . * @ param buildConfigurationList * build configuration list . * @ param mainGroup * main group . * @ param projectDirPath * project directory path . * @ param targets * targets . * @ param projectRoot * projectRoot directory relative to * @ return project . */ private static PBXObjectRef createPBXProject ( final PBXObjectRef buildConfigurationList , final PBXObjectRef mainGroup , final String projectDirPath , final String projectRoot , final List targets ) { } }
final Map map = new HashMap ( ) ; map . put ( "isa" , "PBXProject" ) ; map . put ( "buildConfigurationList" , buildConfigurationList . getID ( ) ) ; map . put ( "hasScannedForEncodings" , "0" ) ; map . put ( "mainGroup" , mainGroup . getID ( ) ) ; map . put ( "projectDirPath" , projectDirPath ) ; map . put ( "targets" , targets ) ; map . put ( "projectRoot" , projectRoot ) ; return new PBXObjectRef ( map ) ;
public class PPatternAssistantTC { /** * Get a set of definitions for the pattern ' s variables . Note that if the pattern includes duplicate variable names , * these are collapse into one . * @ param rp * @ param ptype * @ param scope * @ return */ public List < PDefinition > getDefinitions ( PPattern rp , PType ptype , NameScope scope ) { } }
PDefinitionSet set = af . createPDefinitionSet ( ) ; set . addAll ( af . createPPatternAssistant ( ) . getAllDefinitions ( rp , ptype , scope ) ) ; List < PDefinition > result = new Vector < PDefinition > ( set ) ; return result ;
public class Stream { /** * Returns { @ code Stream } with elements that does not satisfy the given predicate . * < p > This is an intermediate operation . * @ param predicate the predicate used to filter elements * @ return the new stream */ @ NotNull public Stream < T > filterNot ( @ NotNull final Predicate < ? super T > predicate ) { } }
return filter ( Predicate . Util . negate ( predicate ) ) ;
public class ApiOvhCaasregistry { /** * Update image * REST : PUT / caas / registry / { serviceName } / namespaces / { namespaceId } / images / { imageId } * @ param body [ required ] A container image * @ param imageId [ required ] Image id * @ param namespaceId [ required ] Namespace id * @ param serviceName [ required ] Service name * API beta */ public OvhImage serviceName_namespaces_namespaceId_images_imageId_PUT ( String serviceName , String namespaceId , String imageId , OvhInputImage body ) throws IOException { } }
String qPath = "/caas/registry/{serviceName}/namespaces/{namespaceId}/images/{imageId}" ; StringBuilder sb = path ( qPath , serviceName , namespaceId , imageId ) ; String resp = exec ( qPath , "PUT" , sb . toString ( ) , body ) ; return convertTo ( resp , OvhImage . class ) ;
public class NetworkUtil { /** * Only use the given interface on the given network interface if it is up and supports multicast */ private static boolean useInetAddress ( NetworkInterface networkInterface , InetAddress interfaceAddress ) { } }
return checkMethod ( networkInterface , isUp ) && checkMethod ( networkInterface , supportsMulticast ) && // TODO : IpV6 support ! ( interfaceAddress instanceof Inet6Address ) && ! interfaceAddress . isLoopbackAddress ( ) ;
public class WriterHandler { /** * Initialize the log . */ @ PostConstruct public void init ( ) throws ConfigException { } }
try { _pathLog . init ( ) ; super . init ( ) ; // WriteBuffer os = _ pathLog . getRotateStream ( ) . getStream ( ) ; /* if ( _ timestamp ! = null ) { _ timestampFilter = new TimestampFilter ( ) ; _ timestampFilter . setTimestamp ( _ timestamp ) ; */ /* String encoding = System . getProperty ( " file . encoding " ) ; if ( encoding ! = null ) { os . setEncoding ( encoding ) ; */ // os . setDisableClose ( true ) ; // _ os = os ; } catch ( IOException e ) { throw ConfigException . wrap ( e ) ; }
public class DocumentTemplateRepository { /** * Returns all templates for a type , ordered by application tenancy and date desc . */ @ Programmatic public List < DocumentTemplate > findByType ( final DocumentType documentType ) { } }
return repositoryService . allMatches ( new QueryDefault < > ( DocumentTemplate . class , "findByType" , "type" , documentType ) ) ;
public class Helper { /** * Gets or creates a web resource to the ANNIS service . * This is a convenience wrapper to { @ link # getAnnisWebResource ( java . lang . String , annis . security . AnnisUser ) * that does not need any arguments * @ return A reference to the ANNIS service root resource . */ public static WebResource getAnnisWebResource ( ) { } }
VaadinSession vSession = VaadinSession . getCurrent ( ) ; // get URI used by the application String uri = null ; if ( vSession != null ) { uri = ( String ) VaadinSession . getCurrent ( ) . getAttribute ( KEY_WEB_SERVICE_URL ) ; } // if already authentificated the REST client is set as the " user " property AnnisUser user = getUser ( ) ; return getAnnisWebResource ( uri , user ) ;
public class GridGenerator { /** * Method transforms the grid to an array . */ public double [ ] gridToGridArray ( double [ ] [ ] [ ] grid ) { } }
if ( grid == null ) { grid = this . grid ; } gridArray = new double [ dim [ 0 ] * dim [ 1 ] * dim [ 2 ] + 3 ] ; int dimCounter = 0 ; for ( int z = 0 ; z < grid [ 0 ] [ 0 ] . length ; z ++ ) { for ( int y = 0 ; y < grid [ 0 ] . length ; y ++ ) { for ( int x = 0 ; x < grid . length ; x ++ ) { gridArray [ dimCounter ] = grid [ x ] [ y ] [ z ] ; dimCounter ++ ; } } } return gridArray ;
public class AuthResolver { /** * Get an AuthProvider that can handle a given strategy and connection name , or null if there are no * providers to handle them . * @ param strategy to handle * @ param connection to handle * @ return an AuthProvider to handle the authentication or null if no providers are available . */ @ Nullable public static AuthProvider providerFor ( @ Nullable String strategy , @ NonNull String connection ) { } }
if ( authHandlers == null ) { return null ; } AuthProvider provider = null ; for ( AuthHandler p : authHandlers ) { provider = p . providerFor ( strategy , connection ) ; if ( provider != null ) { break ; } } return provider ;
public class Encoding { /** * Construct an Encoding for a given database encoding . * @ param databaseEncoding the name of the database encoding * @ return an Encoding instance for the specified encoding , or an Encoding instance for the * default JVM encoding if the specified encoding is unavailable . */ public static Encoding getDatabaseEncoding ( String databaseEncoding ) { } }
if ( "UTF8" . equals ( databaseEncoding ) ) { return UTF8_ENCODING ; } // If the backend encoding is known and there is a suitable // encoding in the JVM we use that . Otherwise we fall back // to the default encoding of the JVM . String [ ] candidates = encodings . get ( databaseEncoding ) ; if ( candidates != null ) { for ( String candidate : candidates ) { LOGGER . log ( Level . FINEST , "Search encoding candidate {0}" , candidate ) ; if ( Charset . isSupported ( candidate ) ) { return new Encoding ( candidate ) ; } } } // Try the encoding name directly - - maybe the charset has been // provided by the user . if ( Charset . isSupported ( databaseEncoding ) ) { return new Encoding ( databaseEncoding ) ; } // Fall back to default JVM encoding . LOGGER . log ( Level . FINEST , "{0} encoding not found, returning default encoding" , databaseEncoding ) ; return DEFAULT_ENCODING ;
public class WebApplicationContext { public void writeExternal ( java . io . ObjectOutput out ) throws java . io . IOException { } }
out . writeObject ( getContextPath ( ) ) ; out . writeObject ( getVirtualHosts ( ) ) ; HttpHandler [ ] handlers = getHandlers ( ) ; for ( int i = 0 ; i < handlers . length ; i ++ ) { if ( handlers [ i ] instanceof WebApplicationHandler ) break ; out . writeObject ( handlers [ i ] ) ; } out . writeObject ( getAttributes ( ) ) ; out . writeBoolean ( isRedirectNullPath ( ) ) ; out . writeInt ( getMaxCachedFileSize ( ) ) ; out . writeInt ( getMaxCacheSize ( ) ) ; out . writeBoolean ( getStatsOn ( ) ) ; out . writeObject ( getPermissions ( ) ) ; out . writeBoolean ( isClassLoaderJava2Compliant ( ) ) ; out . writeObject ( _defaultsDescriptor ) ; out . writeObject ( _war ) ; out . writeBoolean ( _extract ) ; out . writeBoolean ( _ignorewebjetty ) ; out . writeBoolean ( _distributable ) ; out . writeObject ( _configurationClassNames ) ;
public class CommonExpectations { /** * Sets expectations that will check : * < ol > * < li > The provided WebClient contains a cookie with the default JWT SSO cookie name , its value is a JWT , is NOT marked secure , and is marked HttpOnly * < / ol > */ public static Expectations jwtCookieExists ( String testAction , WebClient webClient , String jwtCookieName ) { } }
Expectations expectations = new Expectations ( ) ; expectations . addExpectation ( new CookieExpectation ( testAction , webClient , jwtCookieName , JwtFatConstants . JWT_REGEX , JwtFatConstants . NOT_SECURE , JwtFatConstants . HTTPONLY ) ) ; return expectations ;
public class ServiceRegistryMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ServiceRegistry serviceRegistry , ProtocolMarshaller protocolMarshaller ) { } }
if ( serviceRegistry == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( serviceRegistry . getRegistryArn ( ) , REGISTRYARN_BINDING ) ; protocolMarshaller . marshall ( serviceRegistry . getPort ( ) , PORT_BINDING ) ; protocolMarshaller . marshall ( serviceRegistry . getContainerName ( ) , CONTAINERNAME_BINDING ) ; protocolMarshaller . marshall ( serviceRegistry . getContainerPort ( ) , CONTAINERPORT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JsMainImpl { /** * Start a messaging engine * @ param busName * @ param name */ public void startMessagingEngine ( String busName , String name ) throws Exception { } }
String thisMethodName = CLASS_NAME + ".startMessagingEngine(String, String)" ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , thisMethodName , new Object [ ] { busName , name } ) ; } BaseMessagingEngineImpl me = ( BaseMessagingEngineImpl ) getMessagingEngine ( busName , name ) ; if ( me != null ) { me . startConditional ( ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Unable to locate engine <bus=" + busName + " name=" + name + ">" ) ; throw new Exception ( "The messaging engine <bus=" + busName + " name=" + name + "> does not exist" ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exit ( tc , thisMethodName ) ; }
public class KAFDocument { /** * Creates an Entity object to load an existing entity . It receives the ID as an argument . The entity is added to the document object . * @ param id the ID of the named entity . * @ param type entity type . 8 values are posible : Person , Organization , Location , Date , Time , Money , Percent , Misc . * @ param references it contains one or more span elements . A span can be used to reference the different occurrences of the same named entity in the document . If the entity is composed by multiple words , multiple target elements are used . * @ return a new named entity . */ public Entity newEntity ( String id , List < Span < Term > > references ) { } }
idManager . updateCounter ( AnnotationType . ENTITY , id ) ; Entity newEntity = new Entity ( id , references ) ; annotationContainer . add ( newEntity , Layer . ENTITIES , AnnotationType . ENTITY ) ; return newEntity ;
public class SeleniumSpec { /** * Takes the content of a webElement and stores it in the thread environment variable passed as parameter * @ param index position of the element in the array of webElements found * @ param envVar name of the thread environment variable where to store the text */ @ Then ( "^I save content of element in index '(\\d+?)' in environment variable '(.+?)'$" ) public void saveContentWebElementInEnvVar ( Integer index , String envVar ) { } }
assertThat ( this . commonspec , commonspec . getPreviousWebElements ( ) ) . as ( "There are less found elements than required" ) . hasAtLeast ( index ) ; String text = commonspec . getPreviousWebElements ( ) . getPreviousWebElements ( ) . get ( index ) . getText ( ) ; ThreadProperty . set ( envVar , text ) ;
public class Constraints { /** * Returns a ConditionalPropertyConstraint : one property will trigger the * validation of another . * @ see ConditionalPropertyConstraint */ public PropertyConstraint ifTrue ( PropertyConstraint ifConstraint , PropertyConstraint [ ] thenConstraints ) { } }
return new ConditionalPropertyConstraint ( ifConstraint , new CompoundPropertyConstraint ( new And ( thenConstraints ) ) ) ;
public class SSLWriteServiceContext { /** * @ see com . ibm . wsspi . tcpchannel . TCPWriteRequestContext # write ( long , com . ibm . wsspi . tcpchannel . TCPWriteCompletedCallback , boolean , int ) */ @ Override public VirtualConnection write ( long numBytes , TCPWriteCompletedCallback userCallback , boolean forceQueue , int timeout ) { } }
return write ( numBytes , userCallback , forceQueue , timeout , false ) ;
public class TraceEventHelper { /** * ToC : Connection listeners * @ param data The data * @ return The events * @ exception Exception If an error occurs */ public static Map < String , List < TraceEvent > > tocConnectionListeners ( List < TraceEvent > data ) throws Exception { } }
Map < String , List < TraceEvent > > result = new TreeMap < String , List < TraceEvent > > ( ) ; for ( TraceEvent te : data ) { if ( te . getType ( ) == TraceEvent . GET_CONNECTION_LISTENER || te . getType ( ) == TraceEvent . GET_CONNECTION_LISTENER_NEW || te . getType ( ) == TraceEvent . GET_INTERLEAVING_CONNECTION_LISTENER || te . getType ( ) == TraceEvent . GET_INTERLEAVING_CONNECTION_LISTENER_NEW ) { List < TraceEvent > l = result . get ( te . getConnectionListener ( ) ) ; if ( l == null ) l = new ArrayList < TraceEvent > ( ) ; l . add ( te ) ; result . put ( te . getConnectionListener ( ) , l ) ; } } return result ;
public class StatementExecutor { /** * Return a raw row mapper suitable for use with { @ link Dao # queryRaw ( String , RawRowMapper , String . . . ) } . */ public RawRowMapper < T > getRawRowMapper ( ) { } }
if ( rawRowMapper == null ) { rawRowMapper = new RawRowMapperImpl < T , ID > ( dao ) ; } return rawRowMapper ;
public class ContextFromVertx { /** * Get the path parameter for the given key . * The parameter will be decoded based on the RFCs . * Check out http : / / docs . oracle . com / javase / 6 / docs / api / java / net / URI . html for * more information . * @ param name The name of the path parameter in a route . Eg * / { myName } / rest / of / url * @ return The decoded path parameter , or null if no such path parameter was * found . */ @ Override public String parameterFromPath ( String name ) { } }
String encodedParameter = route . getPathParametersEncoded ( path ( ) ) . get ( name ) ; if ( encodedParameter == null ) { return null ; } else { // #514 - If the encoded parameter contains : it should be encoded manually . // Some library don ' t meaning that the URI creation fails as : is not allowed . if ( encodedParameter . contains ( ":" ) ) { encodedParameter = encodedParameter . replace ( ":" , "%3A" ) ; } return URI . create ( encodedParameter ) . getPath ( ) ; }
public class EncryptionUtil { /** * Reads the given resource as a string . * @ param resourceName * The name of the resource to read . * @ return The content of the resource as a string . * @ throws IOException * When it was not possible to read the resource . */ private String readResource ( final String resourceName ) throws IOException { } }
return new BufferedReader ( new InputStreamReader ( getClass ( ) . getResourceAsStream ( resourceName ) , DEFAULT_CHARSET ) ) . lines ( ) . collect ( Collectors . joining ( "\n" ) ) ;
public class LongTuples { /** * Creates a new tuple that is a < i > view < / i > * on the specified portion of the given parent . Changes in the * parent will be visible in the returned tuple . * @ param parent The parent tuple * @ param fromIndex The start index in the parent , inclusive * @ param toIndex The end index in the parent , exclusive * @ throws NullPointerException If the given parent is < code > null < / code > * @ throws IllegalArgumentException If the given indices are invalid . * This is the case when < code > fromIndex & lt ; 0 < / code > , * < code > fromIndex & gt ; toIndex < / code > , or * < code > toIndex & gt ; { @ link Tuple # getSize ( ) parent . getSize ( ) } < / code > , * @ return The new tuple */ static LongTuple createSubTuple ( LongTuple parent , int fromIndex , int toIndex ) { } }
return new SubLongTuple ( parent , fromIndex , toIndex ) ;
public class ResultSetDataModel { /** * < p > Return < code > true < / code > if there is < code > wrappedData < / code > * available , and the result of calling < code > absolute ( ) < / code > on the * underlying < code > ResultSet < / code > , passing the current value of * < code > rowIndex < / code > plus one ( to account for the fact that * < code > ResultSet < / code > uses one - relative indexing ) , returns * < code > true < / code > . Otherwise , return < code > false < / code > . < / p > * @ throws FacesException if an error occurs getting the row availability */ public boolean isRowAvailable ( ) { } }
if ( resultSet == null ) { return ( false ) ; } else if ( index < 0 ) { return ( false ) ; } try { if ( resultSet . absolute ( index + 1 ) ) { return ( true ) ; } else { return ( false ) ; } } catch ( SQLException e ) { throw new FacesException ( e ) ; }
public class ParentFinder { /** * { @ inheritDoc } */ @ Override public Collection < String > findBySurnamesBeginWith ( final FinderObject owner , final String beginsWith ) { } }
return owner . findInParentBySurnamesBeginWith ( beginsWith ) ;
public class BigRational { /** * Calculates the addition ( + ) of this rational number and the specified argument . * < p > The result has no loss of precision . < / p > * @ param value the rational number to add * @ return the resulting rational number */ public BigRational add ( BigRational value ) { } }
if ( denominator . equals ( value . denominator ) ) { return of ( numerator . add ( value . numerator ) , denominator ) ; } BigDecimal n = numerator . multiply ( value . denominator ) . add ( value . numerator . multiply ( denominator ) ) ; BigDecimal d = denominator . multiply ( value . denominator ) ; return of ( n , d ) ;
public class BeanMapping { /** * 読み込み用の { @ link CellProcessor } を取得する 。 * @ return カラムの位置順に整列されている { @ link CellProcessor } の一覧 。 */ public CellProcessor [ ] getCellProcessorsForReading ( ) { } }
return columns . stream ( ) . map ( c -> c . getCellProcessorForReading ( ) ) . toArray ( n -> new CellProcessor [ n ] ) ;
public class DefaultWebApplicationFactory {
    /**
     * Registers this factory as a {@link WebApplicationFactory} OSGi service,
     * advertising the application name, mount point and context parameters as
     * service properties.
     *
     * @throws IllegalStateException if the factory is already registered, or
     *         (via {@code validateNotNull}) if {@code applicationName} or
     *         {@code mountPoint} is not set -- TODO confirm the exception type
     *         thrown by {@code validateNotNull}
     */
    public void register() {
        // Guard against double registration.
        if (registration != null) {
            throw new IllegalStateException("Webapplication is already registered.");
        }
        validateNotNull(applicationName, "applicationName");
        validateNotNull(mountPoint, "mountPoint");
        // Hashtable (a Dictionary) is what the OSGi registerService API expects.
        Hashtable<String, Object> props = new Hashtable<String, Object>();
        props.put(Constants.APPLICATION_NAME, applicationName);
        props.put(Constants.MOUNTPOINT, mountPoint);
        props.put(Constants.CONTEXT_PARAMS, contextParam);
        registration = bundleContext.registerService(WebApplicationFactory.class, this, props);
    }
}
public class CSSUnits { /** * Converts the font size given by an identifier to absolute length in pixels . * @ param parent Parent font size ( taken as 1em ) * @ param value The size specification to be converted * @ return absolute font size in px */ public static double convertFontSize ( double parent , CSSProperty . FontSize value ) { } }
double em = parent ; double ret = em ; if ( value == CSSProperty . FontSize . MEDIUM ) ret = medium_font ; else if ( value == CSSProperty . FontSize . SMALL ) ret = medium_font / font_step ; else if ( value == CSSProperty . FontSize . X_SMALL ) ret = medium_font / font_step / font_step ; else if ( value == CSSProperty . FontSize . XX_SMALL ) ret = medium_font / font_step / font_step / font_step ; else if ( value == CSSProperty . FontSize . LARGE ) ret = medium_font * font_step ; else if ( value == CSSProperty . FontSize . X_LARGE ) ret = medium_font * font_step * font_step ; else if ( value == CSSProperty . FontSize . XX_LARGE ) ret = medium_font * font_step * font_step * font_step ; else if ( value == CSSProperty . FontSize . SMALLER ) ret = em / font_step ; else if ( value == CSSProperty . FontSize . LARGER ) ret = em * font_step ; return ret ;
public class BitsUtil {
    /**
     * Creates a new BitSet of fixed cardinality with randomly set bits.
     *
     * @param card the cardinality of the BitSet to create
     * @param capacity the capacity of the BitSet to create - the randomly
     *        generated indices of the bits set to true will be uniformly
     *        distributed between 0 (inclusive) and capacity (exclusive)
     * @param random a Random Object to create the sequence of indices set to
     *        true - the same number occurring twice or more is ignored but the
     *        already selected bit remains true
     * @return a new BitSet with randomly set bits
     * @throws IllegalArgumentException if {@code card} is negative or exceeds
     *         {@code capacity}
     */
    public static long[] random(int card, int capacity, Random random) {
        if (card < 0 || card > capacity) {
            throw new IllegalArgumentException("Cannot set " + card + " out of " + capacity + " bits.");
        }
        // FIXME: Avoid recomputing the cardinality.
        if (card < capacity >>> 1) {
            // Sparse case: start all-zero and set bits until card are set.
            long[] bitset = BitsUtil.zero(capacity);
            // The update expression compensates for collisions: once todo hits
            // 1, the true remaining count is recomputed from the actual
            // cardinality, so duplicate random indices do not end the loop early.
            for (int todo = card; todo > 0; todo = (todo == 1) ? (card - cardinality(bitset)) : (todo - 1)) {
                setI(bitset, random.nextInt(capacity));
            }
            return bitset;
        } else {
            // Dense case: start all-one and clear bits down to card, using the
            // same collision-compensating countdown.
            long[] bitset = BitsUtil.ones(capacity);
            for (int todo = capacity - card; todo > 0; todo = (todo == 1) ? (cardinality(bitset) - card) : (todo - 1)) {
                clearI(bitset, random.nextInt(capacity));
            }
            return bitset;
        }
    }
}
public class ReflectionMbean {
    /**
     * Find attribute fields from our object that will be exposed via JMX.
     *
     * <p>Walks the target's class hierarchy from its concrete class up to (but
     * excluding) {@link Object}, delegating the per-class discovery to the
     * four-argument overload.</p>
     *
     * @param attributeFieldInfoMap map of attribute name to field info
     * @param attributeNameSet set of attribute names discovered so far
     * @param attributes accumulator for the resulting MBean attribute metadata
     */
    private void discoverAttributeFields(Map<String, JmxAttributeFieldInfo> attributeFieldInfoMap,
            Set<String> attributeNameSet, List<MBeanAttributeInfo> attributes) {
        // Visit every superclass so inherited fields are discovered too.
        for (Class<?> clazz = target.getClass(); clazz != Object.class; clazz = clazz.getSuperclass()) {
            discoverAttributeFields(attributeFieldInfoMap, attributeNameSet, attributes, clazz);
        }
    }
}
public class AnnotationsUtil {
    /**
     * Small utility to easily get an annotation; throws an exception if the
     * annotation is not present.
     *
     * @param annotatedType The source type to check the annotation on
     * @param annotationClass The annotation to look for
     * @param <T> The annotation subtype
     * @return The annotation that was requested
     * @throws ODataSystemException If unable to find the annotation, or a
     *         NullPointerException in case a null source was specified
     */
    public static <T extends Annotation> T getAnnotation(AnnotatedElement annotatedType, Class<T> annotationClass) {
        // Delegates to the three-argument overload with no default/context value.
        return getAnnotation(annotatedType, annotationClass, null);
    }
}
public class CreateLocationNfsRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param createLocationNfsRequest the request to marshal; must not be null
     * @param protocolMarshaller the protocol marshaller receiving each field
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(CreateLocationNfsRequest createLocationNfsRequest, ProtocolMarshaller protocolMarshaller) {
        if (createLocationNfsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is written with its corresponding binding descriptor.
            protocolMarshaller.marshall(createLocationNfsRequest.getSubdirectory(), SUBDIRECTORY_BINDING);
            protocolMarshaller.marshall(createLocationNfsRequest.getServerHostname(), SERVERHOSTNAME_BINDING);
            protocolMarshaller.marshall(createLocationNfsRequest.getOnPremConfig(), ONPREMCONFIG_BINDING);
            protocolMarshaller.marshall(createLocationNfsRequest.getTags(), TAGS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class VortexFuture {
    /**
     * Called by VortexMaster to let the user know that the Tasklet completed.
     *
     * <p>Stores the (possibly null) result, dispatches the optional success
     * callback on the executor, and releases any threads blocked on this
     * future's latch.</p>
     *
     * @param pTaskletId id of the completed tasklet; asserted to match this future
     * @param result the tasklet output, may be null
     */
    @Private
    @Override
    public void completed(final int pTaskletId, final TOutput result) {
        assert taskletId == pTaskletId;
        this.userResult = Optional.ofNullable(result);
        if (callbackHandler != null) {
            // Run the user callback asynchronously so the caller is not blocked.
            // NOTE(review): callbackHandler.onSuccess(userResult.get()) will
            // throw if result was null -- presumably callers guarantee a
            // non-null result when a callback is registered; TODO confirm.
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    callbackHandler.onSuccess(userResult.get());
                }
            });
        }
        // Wake up threads waiting in get().
        this.countDownLatch.countDown();
    }
}
public class AlertsEngineCache {
    /**
     * Register a new DataEntry (triggerId, conditionId, dataId).
     *
     * @param dataEntry to register on this node
     */
    public void add(DataEntry dataEntry) {
        activeDataEntries.add(dataEntry);
        // Track the (tenantId, dataId) pair separately, avoiding duplicates.
        // NOTE(review): the contains/add sequence is not atomic; assumes this
        // method is not called concurrently, or that activeDataIds tolerates
        // the race -- TODO confirm.
        DataId newDataId = new DataId(dataEntry.getTenantId(), dataEntry.getDataId());
        if (!activeDataIds.contains(newDataId)) {
            activeDataIds.add(newDataId);
        }
    }
}
public class MetaModelBuilder {
    /**
     * Processes the declared fields of the given class, building metamodel
     * attribute information for each non-transient field.
     *
     * @param <X> the generic type
     * @param clazz the clazz
     * @param managedType the managed type
     */
    private <X> void onDeclaredFields(Class<X> clazz, AbstractManagedType<X> managedType) {
        Field[] embeddedFields = clazz.getDeclaredFields();
        for (Field f : embeddedFields) {
            // Only persistent (non-transient) fields contribute attributes.
            if (isNonTransient(f)) {
                // NOTE(review): TypeBuilder is parameterized with T, which is
                // not this method's type variable <X>; presumably T is declared
                // on the enclosing class -- TODO confirm.
                new TypeBuilder<T>(f).build(managedType, f.getType());
            }
        }
    }
}
public class AntClassLoader {
    /**
     * Sets the current thread's context loader to this classloader, storing
     * the current loader value for later resetting.
     *
     * @throws BuildException if a previously saved context loader has not yet
     *         been restored
     */
    public void setThreadContextLoader() {
        // Refuse to overwrite an already-saved loader; it must be reset first.
        if (isContextLoaderSaved) {
            throw new BuildException("Context loader has not been reset");
        }
        if (LoaderUtils.isContextLoaderAvailable()) {
            // Remember the current loader so resetThreadContextLoader can restore it.
            savedContextLoader = LoaderUtils.getContextClassLoader();
            ClassLoader loader = this;
            // With build.sysclasspath=only, the system loader is used instead
            // of this AntClassLoader.
            if (project != null && "only".equals(project.getProperty("build.sysclasspath"))) {
                loader = this.getClass().getClassLoader();
            }
            LoaderUtils.setContextClassLoader(loader);
            isContextLoaderSaved = true;
        }
    }
}
public class Optional { /** * Apply a predicate to the optional , if the optional is present , and the predicate is true , return the optional , otherwise * return empty . * @ param predicate the predicate to apply * @ return the optional if the predicate is true , empty if not * @ since 1.7 */ public Optional < T > filter ( final Predicate < ? super T > predicate ) { } }
if ( isPresent ( ) && predicate . test ( get ( ) ) ) { return this ; } return empty ( ) ;
public class ManagedBeanDestroyer {
    /**
     * Listens to PreDestroyCustomScopeEvent and PreDestroyViewMapEvent
     * and invokes destroy() for every managed bean in the associated scope.
     *
     * @param event the system event; events of any other type are ignored
     */
    public void processEvent(SystemEvent event) {
        Map<String, Object> scope = null;
        if (event instanceof PreDestroyViewMapEvent) {
            UIViewRoot viewRoot = (UIViewRoot) ((PreDestroyViewMapEvent) event).getComponent();
            // Pass false so no view map is created just to be destroyed.
            scope = viewRoot.getViewMap(false);
            if (scope == null) {
                // view map does not exist --> nothing to destroy
                return;
            }
        } else if (event instanceof PreDestroyCustomScopeEvent) {
            ScopeContext scopeContext = ((PreDestroyCustomScopeEvent) event).getContext();
            scope = scopeContext.getScope();
        } else {
            // wrong event
            return;
        }
        if (!scope.isEmpty()) {
            // Snapshot the keys into an array: destroy() may modify the scope
            // map, which would break direct iteration over its key set.
            Set<String> keySet = scope.keySet();
            String[] keys = keySet.toArray(new String[keySet.size()]);
            for (String key : keys) {
                Object value = scope.get(key);
                this.destroy(key, value);
            }
        }
    }
}
public class StandardAccessManager { /** * Cleans up expired sessions . * @ param officialTime */ public void onPageEvent ( long officialTime ) { } }
// cleanup expired sessions ArrayList garbage = new ArrayList ( sessionsMirror . size ( ) ) ; Iterator i ; StandardSession session ; synchronized ( sessionsMirror ) { i = sessionsMirror . values ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { session = ( StandardSession ) i . next ( ) ; if ( session . isExpired ( ) ) { garbage . add ( session ) ; } } } i = garbage . iterator ( ) ; while ( i . hasNext ( ) ) { session = ( StandardSession ) i . next ( ) ; System . out . println ( new LogEntry ( "session " + session . getToken ( ) + " expired..." ) ) ; this . destroySessionByToken ( session . getToken ( ) ) ; }
public class DriverFactory {
    /**
     * Configures Chrome options (download directory and optional custom
     * browser binary) and installs them into the given capabilities.
     *
     * @param capabilities The global DesiredCapabilities
     * @param chromeOptions the options object to populate
     */
    private void setChromeOptions(final DesiredCapabilities capabilities, ChromeOptions chromeOptions) {
        // Set custom downloaded file path. When you check content of downloaded file by robot.
        final HashMap<String, Object> chromePrefs = new HashMap<>();
        chromePrefs.put("download.default_directory",
                System.getProperty(USER_DIR) + File.separator + DOWNLOADED_FILES_FOLDER);
        chromeOptions.setExperimentalOption("prefs", chromePrefs);
        // Set custom chromium (if you not use default chromium on your target device)
        final String targetBrowserBinaryPath = Context.getWebdriversProperties("targetBrowserBinaryPath");
        if (targetBrowserBinaryPath != null && !"".equals(targetBrowserBinaryPath)) {
            chromeOptions.setBinary(targetBrowserBinaryPath);
        }
        capabilities.setCapability(ChromeOptions.CAPABILITY, chromeOptions);
    }
}
public class GeoBBoxCondition {
    /**
     * {@inheritDoc}
     *
     * <p>Builds a rectangle from this condition's bounding-box coordinates and
     * queries the spatial strategy for shapes intersecting it.</p>
     */
    @Override
    public Query doQuery(SpatialStrategy strategy) {
        Rectangle rectangle = CONTEXT.makeRectangle(minLongitude, maxLongitude, minLatitude, maxLatitude);
        SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, rectangle);
        return strategy.makeQuery(args);
    }
}
public class ConfigUtils {
    /**
     * Method is used to return an enum value from a given string.
     *
     * @param enumClass Class for the resulting enum value
     * @param config config to read the value from
     * @param key key for the value
     * @param <T> Enum class to return type for.
     * @return enum value for the given key.
     * @throws IllegalArgumentException if the configured string is not a
     *         constant of {@code enumClass} (from {@link Enum#valueOf})
     * @see ValidEnum
     */
    public static <T extends Enum<T>> T getEnum(Class<T> enumClass, AbstractConfig config, String key) {
        Preconditions.checkNotNull(enumClass, "enumClass cannot be null");
        Preconditions.checkState(enumClass.isEnum(), "enumClass must be an enum.");
        // Enum.valueOf requires an exact (case-sensitive) constant name.
        String textValue = config.getString(key);
        return Enum.valueOf(enumClass, textValue);
    }
}
public class FixBondOrdersTool { /** * Gets the List of bond nos corresponding to a particular set of fused rings . * @ param { @ link IAtomContainer } molecule * @ param ringGroup * @ param { @ link IRingSet } ringSet * @ return List of bond numbers for each set */ private List < Integer > getBondNosForRingGroup ( IAtomContainer molecule , List < Integer > ringGroup , IRingSet ringSet ) { } }
List < Integer > btc = new ArrayList < Integer > ( ) ; for ( Integer i : ringGroup ) { for ( IBond bond : ringSet . getAtomContainer ( i ) . bonds ( ) ) { if ( btc . size ( ) > 0 ) { if ( ! btc . contains ( molecule . indexOf ( bond ) ) ) { btc . add ( molecule . indexOf ( bond ) ) ; } } else { btc . add ( molecule . indexOf ( bond ) ) ; } } } return btc ;
public class SimpleClassLoadHelper {
    /**
     * Enable sharing of the class-loader with 3rd party.
     *
     * <p>Attempts to obtain the caller's class loader via the (JVM-internal,
     * protected) {@code ClassLoader.getCallerClassLoader} method, mirroring the
     * behavior of {@code Class.forName(...)}. Because the method is protected
     * and never made accessible, the reflective invocation fails on standard
     * JVMs and the catch-all fallback returns this class' own loader. The
     * original code contained an {@code if (false)} block around the
     * {@code setAccessible} call; that dead code has been removed without
     * changing behavior.</p>
     *
     * @return the class-loader used by the helper.
     */
    public ClassLoader getClassLoader() {
        // To follow the same behavior of Class.forName(...) I had to play
        // dirty (Supported by Sun, IBM & BEA JVMs)
        try {
            // Get a reference to this class' class-loader
            final ClassLoader cl = getClass().getClassLoader();
            // Create a method instance representing the protected
            // getCallerClassLoader method of class ClassLoader
            final Method mthd = ClassLoader.class.getDeclaredMethod("getCallerClassLoader", new Class<?>[0]);
            // Try to get the caller's class-loader. Without setAccessible this
            // throws (the method is protected), which the catch below absorbs.
            return (ClassLoader) mthd.invoke(cl, new Object[0]);
        } catch (final Throwable all) {
            // Use this class' class-loader
            return getClass().getClassLoader();
        }
    }
}
public class HttpParser {
    /**
     * Quick lookahead for the start state looking for a request method or an
     * HTTP version, otherwise skip white space until something else to parse.
     *
     * @param buffer the input buffer positioned at the start of a message
     * @return always {@code false} (parsing is not complete after the start state)
     */
    private boolean quickStart(ByteBuffer buffer) {
        if (_requestHandler != null) {
            // Request parsing: try to recognize a known method token in place.
            _method = HttpMethod.lookAheadGet(buffer);
            if (_method != null) {
                _methodString = _method.asString();
                // Skip the method token plus the single following byte (space).
                buffer.position(buffer.position() + _methodString.length() + 1);
                setState(State.SPACE1);
                return false;
            }
        } else if (_responseHandler != null) {
            // Response parsing: try to recognize a known HTTP version in place.
            _version = HttpVersion.lookAheadGet(buffer);
            if (_version != null) {
                buffer.position(buffer.position() + _version.asString().length() + 1);
                setState(State.SPACE1);
                return false;
            }
        }
        // Quick start look
        while (_state == State.START && buffer.hasRemaining()) {
            HttpTokens.Token t = next(buffer);
            if (t == null)
                break;
            switch (t.getType()) {
                case ALPHA:
                case DIGIT:
                case TCHAR:
                case VCHAR: {
                    // First token character: begin accumulating the method
                    // (requests) or version (responses) string.
                    _string.setLength(0);
                    _string.append(t.getChar());
                    setState(_requestHandler != null ? State.METHOD : State.RESPONSE_VERSION);
                    return false;
                }
                case OTEXT:
                case SPACE:
                case HTAB:
                    throw new IllegalCharacterException(_state, t, buffer);
                default:
                    break;
            }
            // count this white space as a header byte to avoid DOS
            if (_maxHeaderBytes > 0 && ++_headerBytes > _maxHeaderBytes) {
                LOG.warn("padding is too large >" + _maxHeaderBytes);
                throw new BadMessageException(HttpStatus.BAD_REQUEST_400);
            }
        }
        return false;
    }
}
public class Client {
    /**
     * Send batch request to the LFS-server.
     *
     * @param batchReq Batch request.
     * @return Object metadata.
     * @throws IOException if the HTTP request fails
     */
    @NotNull
    public BatchRes postBatch(@NotNull final BatchReq batchReq) throws IOException {
        // Posts the batch as JSON to the server's batch endpoint, with
        // authentication handled (and possibly retried) by doWork.
        return doWork(auth -> doRequest(auth, new JsonPost<>(batchReq, BatchRes.class),
                AuthHelper.join(auth.getHref(), PATH_BATCH)), batchReq.getOperation());
    }
}
public class ELCSSBundleTag {
    /**
     * Clears all EL expression attributes so this pooled tag instance can be
     * reused.
     *
     * (non-Javadoc)
     * @see javax.servlet.jsp.tagext.TagSupport#release()
     */
    @Override
    public void release() {
        super.release();
        // Reset every expression attribute to its initial (null) state.
        setSrcExpr(null);
        setUseRandomParamExpr(null);
        setMediaExpr(null);
        setTitleExpr(null);
        setAlternateExpr(null);
        setDisplayAlternateExpr(null);
    }
}
public class Ifc4PackageImpl {
    /**
     * Returns the EClass for IfcPropertyEnumeratedValue, lazily resolved from
     * the registered Ifc4 package (classifier index 469).
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcPropertyEnumeratedValue() {
        // Lazy lookup: resolve once from the global package registry and cache.
        if (ifcPropertyEnumeratedValueEClass == null) {
            ifcPropertyEnumeratedValueEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(469);
        }
        return ifcPropertyEnumeratedValueEClass;
    }
}
public class AmazonAutoScalingClient {
    /**
     * Describes the notification actions associated with the specified Auto Scaling group.
     *
     * @param request the DescribeNotificationConfigurations request
     * @return Result of the DescribeNotificationConfigurations operation returned by the service.
     * @throws InvalidNextTokenException
     *         The <code>NextToken</code> value is not valid.
     * @throws ResourceContentionException
     *         You already have a pending update to an Amazon EC2 Auto Scaling resource (for example, an Auto Scaling
     *         group, instance, or load balancer).
     * @sample AmazonAutoScaling.DescribeNotificationConfigurations
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/autoscaling-2011-01-01/DescribeNotificationConfigurations"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public DescribeNotificationConfigurationsResult describeNotificationConfigurations(
            DescribeNotificationConfigurationsRequest request) {
        // Apply client-side pre-processing (handlers), then execute the call.
        request = beforeClientExecution(request);
        return executeDescribeNotificationConfigurations(request);
    }
}
public class PeriodFormatter {
    /**
     * Prints a ReadablePeriod to a Writer.
     *
     * @param out the formatted period is written out
     * @param period the period to format, not null
     * @throws IOException if writing to the writer fails
     */
    public void printTo(Writer out, ReadablePeriod period) throws IOException {
        // Validate that this formatter supports printing and the period is non-null.
        checkPrinter();
        checkPeriod(period);
        getPrinter().printTo(out, period, iLocale);
    }
}
public class HolderMBean {
    /**
     * Defines the managed attributes exposed by this MBean and caches the
     * underlying {@link Holder} resource.
     */
    protected void defineManagedResource() {
        super.defineManagedResource();
        defineAttribute("name");
        defineAttribute("displayName");
        defineAttribute("className");
        // initParameters is read-only and resolved on the MBean itself.
        defineAttribute("initParameters", READ_ONLY, ON_MBEAN);
        _holder = (Holder) getManagedResource();
    }
}
public class WikipediaCleaner {
    /**
     * Remove wiki citations of the form "{{cite ...}}", which links to some
     * text or another wikipedia link.
     *
     * @param article The article text to clean; modified in place.
     */
    private void removeDoubleBraceMarkup(StringBuilder article) {
        int braceStart = article.indexOf("{{");
        // Repeatedly loop while {{ }} text still exists in the document
        while (braceStart >= 0) {
            // Find the matching closing }} if it exists. Some wikipedia
            // text is malformated, with no matching brace, so take no
            // action in this case.
            int braceEnd = article.indexOf("}}", braceStart);
            int nextBraceStart = article.indexOf("{{", braceStart + 1);
            // Some {{ content has embedded {{ content, which causes problems
            // for nearest matching. Recursively search until a nearest-match
            // is found and then
            while (nextBraceStart > braceStart && nextBraceStart < braceEnd) {
                removeEmbeddedBrace(article, nextBraceStart);
                // Recompute the ending brace, since removing the embedded {{
                // will have removed the }} as well.
                braceEnd = article.indexOf("}}", braceStart);
                nextBraceStart = article.indexOf("{{", braceStart + 1);
            }
            // No closing }} anywhere after this {{: leave the text as-is.
            if (braceEnd < 0)
                break;
            // Delete the {{...}} span including both delimiters.
            article.delete(braceStart, braceEnd + 2);
            // Search for the next {{ if it exists
            braceStart = article.indexOf("{{", braceStart);
        }
    }
}
public class AbstractJcrNode { /** * Obtain an iterator over the nodes that reference this node . * @ param referenceType specification of the type of references to include ; may not be null * @ return the iterator over the referencing nodes ; never null * @ throws RepositoryException if an error occurs while obtaining the information */ protected final NodeIterator referringNodes ( ReferenceType referenceType ) throws RepositoryException { } }
if ( ! this . isReferenceable ( ) ) { return JcrEmptyNodeIterator . INSTANCE ; } // Get all of the nodes that are referring to this node . . . Set < NodeKey > keys = node ( ) . getReferrers ( sessionCache ( ) , referenceType ) ; if ( keys . isEmpty ( ) ) return JcrEmptyNodeIterator . INSTANCE ; return new JcrNodeIterator ( session ( ) , keys . iterator ( ) , keys . size ( ) , null ) ;
public class JstormYarnUtils {
    /**
     * Join an array of strings with a separator that appears after every
     * instance in the list - optionally at the end.
     *
     * @param collection strings
     * @param separator separator string
     * @param trailing add a trailing entry or not
     * @return the joined entries
     */
    public static String join(String[] collection, String separator, boolean trailing) {
        // Delegate to the Collection-based overload.
        return join(Arrays.asList(collection), separator, trailing);
    }
}
public class GBSTree {
    /**
     * Add to predecessor or current node after a left move fell off the tree.
     *
     * <p>We tried to move left and fell off the end. If there is no node from
     * which we previously moved right (no logical predecessor), the insert
     * point is in the left half of the current node; otherwise the choice
     * between the current node and the predecessor is made by
     * {@code leftAddWithPredecessor}.</p>
     *
     * @param p Current node from which we tried to move left
     * @param r Last node from which we actually moved right (logical predecessor), may be null
     * @param new1 Key being inserted
     * @param ip Scratch NodeInsertPoint to avoid allocating a new one
     * @param point Returned insert points
     */
    private void leftAdd(GBSNode p, GBSNode r, Object new1, NodeInsertPoint ip, InsertNodes point) {
        if (r == null) /* There is no upper predecessor */
            leftAddNoPredecessor(p, new1, ip, point);
        else /* There is an upper predecessor */
            leftAddWithPredecessor(p, r, new1, ip, point);
    }
}
public class DITypeInfo {
    /**
     * Retrieves the maximum Long.MAX_VALUE bounded length, in bytes, for
     * character types.<p>
     *
     * @return the maximum Long.MAX_VALUE bounded length, in bytes, for
     *         character types; {@code null} for non-character types
     */
    Long getCharOctLenAct() {
        switch (type) {
            // (N)CHAR / (N)VARCHAR: up to Integer.MAX_VALUE UTF-16 chars,
            // i.e. twice that many bytes.
            case Types.SQL_CHAR:
            case Types.SQL_NCHAR:
            case Types.SQL_VARCHAR:
            case Types.SQL_NVARCHAR:
                return ValuePool.getLong(2L * Integer.MAX_VALUE);
            // LOB types are bounded only by Long.MAX_VALUE.
            case Types.SQL_CLOB:
            case Types.NCLOB:
                return ValuePool.getLong(Long.MAX_VALUE);
            default:
                return null;
        }
    }
}
public class ByteUtil { /** * increment byte array as a number until max is reached * @ param bytes * byte [ ] * @ return boolean */ public static boolean increment ( byte [ ] bytes ) { } }
final int startIndex = 0 ; int i ; for ( i = bytes . length - 1 ; i >= startIndex ; i -- ) { bytes [ i ] ++ ; if ( bytes [ i ] != 0 ) break ; } // we return false when all bytes are 0 again return ( i >= startIndex || bytes [ startIndex ] != 0 ) ;
public class HashIntSet {
    /**
     * documentation inherited from interface IntSet
     *
     * <p>The returned interator is fail-fast: structural modification of the
     * set outside the interator (tracked via {@code _modcount}) causes a
     * {@link ConcurrentModificationException}.</p>
     */
    public Interator interator() {
        return new AbstractInterator() {
            public boolean hasNext() {
                checkConcurrentModification();
                // _pos counts elements returned so far; _size is the set size.
                return _pos < _size;
            }
            public int nextInt() {
                checkConcurrentModification();
                if (_pos >= _size) {
                    throw new NoSuchElementException();
                }
                if (_idx == 0) {
                    // start after a sentinel. if we don't and instead start in the middle of a
                    // run of filled buckets, we risk returning values that will reappear at the
                    // end of the list after being shifted over to due to a removal
                    while (_buckets[_idx++] != _sentinel);
                }
                // Table length is a power of two, so (index & mask) wraps around.
                int mask = _buckets.length - 1;
                for (; _pos < _size; _idx++) {
                    int value = _buckets[_idx & mask];
                    if (value != _sentinel) {
                        _pos++;
                        _idx++;
                        return value;
                    }
                }
                // we shouldn't get here
                throw new RuntimeException("Ran out of elements getting next");
            }
            @Override
            public void remove() {
                checkConcurrentModification();
                if (_idx == 0) {
                    throw new IllegalStateException("Next method not yet called");
                }
                // Remove the element most recently returned by nextInt():
                // it lives at the previous index position.
                int pidx = (--_idx) & (_buckets.length - 1);
                if (_buckets[pidx] == _sentinel) {
                    throw new IllegalStateException("No element to remove");
                }
                _buckets[pidx] = _sentinel;
                _pos--;
                _size--;
                // Record our own modification so the fail-fast check still passes.
                _omodcount = ++_modcount;
                // Re-pack the following run of buckets (open addressing).
                shift(pidx);
            }
            protected void checkConcurrentModification() {
                if (_modcount != _omodcount) {
                    throw new ConcurrentModificationException();
                }
            }
            // _pos: elements returned so far; _idx: raw bucket cursor.
            protected int _pos, _idx;
            // Snapshot of the set's modification count at creation time.
            protected int _omodcount = _modcount;
        };
    }
}
public class ServerStatisticsServlet {
    /**
     * Handle all requests to Statistics Servlet. {@inheritDoc}
     *
     * <p>Serves cached statistics as JSON, refreshing the cache when it is
     * stale or when the no-cache request parameter is set.</p>
     */
    @Override
    protected void doPost(final HttpServletRequest req, final HttpServletResponse resp)
            throws ServletException, IOException {
        // Null-safe read of the no-cache flag; Boolean.valueOf(null) is false.
        final boolean noCache = null != req && Boolean.valueOf(req.getParameter(noCacheParamName));
        // Refresh when explicitly requested or when the cache TTL has elapsed.
        if (noCache || System.currentTimeMillis() - lastRefreshedTime > cacheTimeInMilliseconds) {
            this.populateStatistics(noCache);
        }
        JSONUtils.toJSON(cachedstats, resp.getOutputStream(), true);
    }
}
public class ListMath {
    /**
     * Raises each value in a list to the same power.
     *
     * <p>The returned list is a lazy view: each element is computed on access
     * from the backing {@code data}, so changes in {@code data} are reflected
     * in the result.</p>
     *
     * @param data The list of numbers to raise to a power
     * @param expon The power to raise each number in the list to
     * @return result[x] = data[x] ** expon
     */
    public static ListDouble pow(final ListNumber data, final double expon) {
        return new ListDouble() {
            @Override
            public double getDouble(int index) {
                return Math.pow(data.getDouble(index), expon);
            }
            @Override
            public int size() {
                return data.size();
            }
        };
    }
}
public class XClaimArgs {
    /**
     * This is the same as IDLE but instead of a relative amount of milliseconds,
     * it sets the idle time to a specific unix time (in milliseconds). This is
     * useful in order to rewrite the AOF file generating XCLAIM commands.
     *
     * @param timestamp idle time; must not be null and must be convertible to
     *        an {@link Instant}
     * @return {@code this}.
     */
    public XClaimArgs time(TemporalAccessor timestamp) {
        LettuceAssert.notNull(timestamp, "Timestamp must not be null");
        // Convert to epoch milliseconds and delegate to the long overload.
        return time(Instant.from(timestamp).toEpochMilli());
    }
}
public class PagingTableModel {
    /**
     * Sets the maximum size of the page.
     * If the given maximum size is greater than the current maximum size a new
     * page will be loaded, otherwise the current page will be shrunk to meet
     * the given maximum size. In both cases the {@code TableModelListener}
     * will be notified of the change.
     * The call to this method has no effect if the given maximum size is equal
     * to the current maximum size.
     *
     * @param maxPageSize the new maximum page size
     * @throws IllegalArgumentException if {@code maxPageSize} is negative or zero.
     * @see #setMaxPageSizeWithoutPageChanges(int)
     * @see TableModelListener
     */
    public void setMaxPageSize(final int maxPageSize) {
        if (maxPageSize <= 0) {
            throw new IllegalArgumentException("Parameter maxPageSize must be greater than zero.");
        }
        // No-op when the size is unchanged.
        if (this.maxPageSize == maxPageSize) {
            return;
        }
        int oldMaxPageSize = this.maxPageSize;
        setMaxPageSizeWithoutPageChanges(maxPageSize);
        int rowCount = getRowCount();
        if (rowCount > 0) {
            if (maxPageSize > oldMaxPageSize) {
                // Growing: reload the page at the current offset to fill it.
                schedule(dataOffset);
            } else if (data.size() > maxPageSize) {
                // Shrinking: trim the cached page. The copy is taken before
                // dispatch, and setData runs on the EDT as table-model
                // listeners must be notified on the event thread.
                final List<T> shrunkData = data.subList(0, maxPageSize);
                EventQueue.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        setData(dataOffset, new ArrayList<>(shrunkData));
                    }
                });
            }
        }
    }
}
public class AbstractPersistingLongIDFactory {
    /**
     * Returns the next unique ID, reserving a new block of
     * {@code m_nReserveCount} IDs from persistent storage when the current
     * block is exhausted.
     *
     * Note: this implementation must be synchronized because the method calling
     * this only uses a readLock!
     *
     * @return the next ID
     * @throws IllegalStateException if the persisted counter went backwards
     */
    public final long getNewID() {
        return m_aLock.locked(() -> {
            // Current block [m_nID, m_nLastID) exhausted? Reserve a new one.
            if (m_nID >= m_nLastID) {
                // Read new IDs
                final long nNewID = readAndUpdateIDCounter(m_nReserveCount);
                // the existing ID may not be < than the previously used ID!
                if (m_nLastID >= 0 && nNewID < m_nID)
                    throw new IllegalStateException("The read value " + nNewID +
                            " is smaller than the last known ID " + m_nID + "!");
                m_nID = nNewID;
                m_nLastID = nNewID + m_nReserveCount;
            }
            // Hand out the next ID from the reserved block.
            return m_nID++;
        });
    }
}
public class ISOBlueDevice {
    /**
     * Create a pair of {@link BufferedISOBUSSocket}s which will receive all
     * {@link Message}s stored by ISOBlue coming after the specified one.<br>
     * One socket will receive engine bus messages and the other will receive
     * implement bus messages.
     *
     * @param fromId the ID corresponding to the {@link Message} after which
     *        these sockets will start receiving
     * @return An array containing two buffered sockets.<br>
     *         Index 0 contains the socket which will receive engine bus messages.<br>
     *         Index 1 contains the socket which will receive implement bus messages.
     * @throws IOException
     * @throws InterruptedException
     */
    public ISOBUSSocket[] createBufferedISOBUSSockets(Serializable fromId)
            throws IOException, InterruptedException {
        BufferedISOBUSSocket[] socks = new BufferedISOBUSSocket[2];
        Serializable toId;
        // The replay range is (fromId, toId], where toId is the current start ID.
        toId = getStartId();
        // Create socket for past engine messages
        socks[0] = new BufferedISOBUSSocket(fromId, toId, mEngineBus, null, null);
        // Create socket for past implement messages
        socks[1] = new BufferedISOBUSSocket(fromId, toId, mImplementBus, null, null);
        // Create command to ask ISOBlue for past data.
        // NOTE(review): %08x requires the IDs to be integral numbers;
        // presumably the Serializable IDs are always Integer/Long -- TODO confirm.
        sendCommand((new ISOBlueCommand(ISOBlueCommand.OpCode.PAST, (byte) -1, (byte) -1,
                String.format("%08x%08x", fromId, toId).getBytes())));
        return socks;
    }
}
public class LoggerRepositoryExImpl {
    /**
     * Return this repository's own scheduler.
     * The scheduler is lazily instantiated.
     *
     * <p>NOTE(review): the lazy initialization is not synchronized here;
     * presumably callers serialize access or tolerate the race -- TODO confirm.</p>
     *
     * @return this repository's own scheduler.
     */
    public Scheduler getScheduler() {
        if (scheduler == null) {
            scheduler = new Scheduler();
            // Daemon thread so the scheduler does not keep the JVM alive.
            scheduler.setDaemon(true);
            scheduler.start();
        }
        return scheduler;
    }
}
public class AbstractEngineSync { /** * Returns the last modified time for the cache entry * @ param filename File to look for */ public long lastModified ( final String filename ) { } }
LRUCache . CacheEntry < String , T > cacheEntry = cache . get ( filename ) ; if ( cacheEntry == null ) { return - 1 ; } return cacheEntry . lastModified ;
public class GlobalConfiguration { /** * Loads a YAML - file of key - value pairs . * Colon and whitespace " : " separate key and value ( one per line ) . The hash tag " # " starts a single - line comment . * Example : * < pre > * jobmanager . rpc . address : localhost # network address for communication with the job manager * jobmanager . rpc . port : 6123 # network port to connect to for communication with the job manager * taskmanager . rpc . port : 6122 # network port the task manager expects incoming IPC connections * < / pre > * This does not span the whole YAML specification , but only the * syntax * of simple YAML key - value pairs ( see issue * # 113 on GitHub ) . If at any point in time , there is a need to go beyond simple key - value pairs syntax * compatibility will allow to introduce a YAML parser library . * @ param file the YAML file to read from * @ see < a href = " http : / / www . yaml . org / spec / 1.2 / spec . html " > YAML 1.2 specification < / a > * @ see < a href = " https : / / github . com / stratosphere / stratosphere / issues / 113 " > Issue # 113 < / a > */ private void loadYAMLResource ( final File file ) { } }
BufferedReader reader = null ; try { reader = new BufferedReader ( new InputStreamReader ( new FileInputStream ( file ) ) ) ; String line = null ; while ( ( line = reader . readLine ( ) ) != null ) { // 1 . check for comments String [ ] comments = line . split ( "#" , 2 ) ; String conf = comments [ 0 ] ; // 2 . get key and value if ( conf . length ( ) > 0 ) { String [ ] kv = conf . split ( ": " , 2 ) ; // skip line with no valid key - value pair if ( kv . length == 1 ) { LOG . warn ( "Error while trying to split key and value in configuration file " + file + ": " + line ) ; continue ; } String key = kv [ 0 ] . trim ( ) ; String value = kv [ 1 ] . trim ( ) ; // sanity check if ( key . length ( ) == 0 || value . length ( ) == 0 ) { LOG . warn ( "Error after splitting key and value in configuration file " + file + ": " + line ) ; continue ; } LOG . debug ( "Loading configuration property: " + key + ", " + value ) ; this . confData . put ( key , value ) ; } } } catch ( IOException e ) { e . printStackTrace ( ) ; } finally { try { reader . close ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } }
public class EntityCachingService {
    /**
     * Removes the entity identified by type and key from the cache and notifies
     * peer caches. Pure delegation to the singleton service obtained from
     * {@code EntityCachingServiceLocator}.
     *
     * @param type Class
     * @param key String
     * @exception org.apereo.portal.concurrency.CachingException
     */
    @Override
    public void remove(Class<? extends IBasicEntity> type, String key) throws CachingException {
        EntityCachingServiceLocator.getEntityCachingService().remove(type, key);
    }
}
public class AppServiceCertificateOrdersInner { /** * List all certificates associated with a certificate order . * List all certificates associated with a certificate order . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param certificateOrderName Name of the certificate order . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws DefaultErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; AppServiceCertificateResourceInner & gt ; object if successful . */ public PagedList < AppServiceCertificateResourceInner > listCertificates ( final String resourceGroupName , final String certificateOrderName ) { } }
ServiceResponse < Page < AppServiceCertificateResourceInner > > response = listCertificatesSinglePageAsync ( resourceGroupName , certificateOrderName ) . toBlocking ( ) . single ( ) ; return new PagedList < AppServiceCertificateResourceInner > ( response . body ( ) ) { @ Override public Page < AppServiceCertificateResourceInner > nextPage ( String nextPageLink ) { return listCertificatesNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class NTLMResponses {
    /**
     * Calculates the NTLM Response for the given challenge, using the
     * specified password.
     *
     * @param password The user's password.
     * @param challenge The Type 2 challenge from the server.
     * @return The NTLM Response.
     */
    public static byte[] getNTLMResponse(String password, byte[] challenge) throws Exception {
        byte[] ntlmHash = ntlmHash(password);
        // Reusing lmResponse here looks intentional: the NTLM response applies the
        // same DES-based computation as the LM response, just seeded with the NT
        // hash instead of the LM hash — confirm against the helper implementations.
        return lmResponse(ntlmHash, challenge);
    }
}
public class RedisClient {
    /**
     * Executes all dirty nodes accumulated for this batch, using a Redis
     * pipeline when a bound transaction is active.
     *
     * (non-Javadoc)
     * @see com.impetus.kundera.persistence.api.Batcher#executeBatch()
     *
     * @return the number of nodes in the batch (dirty or not)
     */
    @Override
    public int executeBatch() {
        Object connection = getConnection();
        // Create a hashset and populate data into it
        Pipeline pipeLine = null;
        // Only pipeline when a transaction is bound; otherwise each operation
        // goes straight over the raw connection.
        if (isBoundTransaction()) {
            pipeLine = ((Jedis) connection).pipelined();
        }
        try {
            for (Node node : nodes) {
                if (node.isDirty()) {
                    node.handlePreEvent();
                    // delete can not be executed in batch
                    if (node.isInState(RemovedState.class)) {
                        // Removed nodes are deleted; everything else is persisted.
                        onDelete(node.getData(), node.getEntityId(),
                                pipeLine != null ? pipeLine : connection);
                    } else {
                        List<RelationHolder> relationHolders = getRelationHolders(node);
                        EntityMetadata metadata = KunderaMetadataManager.getEntityMetadata(
                                kunderaMetadata, node.getDataClass());
                        onPersist(metadata, node.getData(), node.getEntityId(), relationHolders,
                                pipeLine != null ? pipeLine : connection);
                    }
                    node.handlePostEvent();
                }
            }
        } finally {
            if (pipeLine != null) {
                pipeLine.sync(); // send I/O.. as persist call. so no need to
                                 // read
                                 // response?
            }
            onCleanup(connection);
        }
        // NOTE(review): returns the total node count, including non-dirty nodes
        // that were skipped — confirm callers expect this rather than the number
        // of operations actually executed.
        return nodes.size();
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link TexturedSurfaceType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link TexturedSurfaceType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/citygml/texturedsurface/1.0" , name = "TexturedSurface" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "OrientableSurface" ) public JAXBElement < TexturedSurfaceType > createTexturedSurface ( TexturedSurfaceType value ) { } }
return new JAXBElement < TexturedSurfaceType > ( _TexturedSurface_QNAME , TexturedSurfaceType . class , null , value ) ;
public class AbsListViewFragment { /** * The default content for a ListFragment has a TextView that can * be shown when the list is empty . If you would like to have it * shown , call this method to supply the text it should use . */ public void setEmptyText ( CharSequence text ) { } }
ensureList ( ) ; if ( mStandardEmptyView == null ) { throw new IllegalStateException ( "Can't be used with a custom content view" ) ; } mStandardEmptyView . setText ( text ) ; if ( mEmptyText == null ) { mList . setEmptyView ( mStandardEmptyView ) ; } mEmptyText = text ;
public class ProviderInfoWeightManager { /** * Recover weight of provider info , and set default status * @ param providerInfo ProviderInfo * @ param originWeight origin weight */ public static void recoverOriginWeight ( ProviderInfo providerInfo , int originWeight ) { } }
providerInfo . setStatus ( ProviderStatus . AVAILABLE ) ; providerInfo . setWeight ( originWeight ) ;
public class JDesktopPaneLayout { /** * Validate the layout after internal frames have been added * or removed */ public void validate ( ) { } }
Dimension size = desktopPane . getSize ( ) ; size . height -= computeDesktopIconsSpace ( ) ; layoutInternalFrames ( size ) ;
public class SampledRowKeysAdapter { /** * < p > adaptResponse . < / p > * @ param responses a { @ link java . util . List } object . * @ return a { @ link java . util . List } object . */ public List < HRegionLocation > adaptResponse ( List < KeyOffset > responses ) { } }
List < HRegionLocation > regions = new ArrayList < > ( ) ; // Starting by the first possible row , iterate over the sorted sampled row keys and create regions . byte [ ] startKey = HConstants . EMPTY_START_ROW ; for ( KeyOffset response : responses ) { byte [ ] endKey = response . getKey ( ) . toByteArray ( ) ; // Avoid empty regions . if ( Bytes . equals ( startKey , endKey ) ) { continue ; } regions . add ( createRegionLocation ( startKey , endKey ) ) ; startKey = endKey ; } // Create one last region if the last region doesn ' t reach the end or there are no regions . byte [ ] endKey = HConstants . EMPTY_END_ROW ; if ( regions . isEmpty ( ) || ! Bytes . equals ( startKey , endKey ) ) { regions . add ( createRegionLocation ( startKey , endKey ) ) ; } return regions ;
public class ConfigRuleComplianceFiltersMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ConfigRuleComplianceFilters configRuleComplianceFilters , ProtocolMarshaller protocolMarshaller ) { } }
if ( configRuleComplianceFilters == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( configRuleComplianceFilters . getConfigRuleName ( ) , CONFIGRULENAME_BINDING ) ; protocolMarshaller . marshall ( configRuleComplianceFilters . getComplianceType ( ) , COMPLIANCETYPE_BINDING ) ; protocolMarshaller . marshall ( configRuleComplianceFilters . getAccountId ( ) , ACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( configRuleComplianceFilters . getAwsRegion ( ) , AWSREGION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Entry { /** * Returns a newly - created { @ link Entry } of a directory . * @ param revision the revision of the directory * @ param path the path of the directory */ public static Entry < Void > ofDirectory ( Revision revision , String path ) { } }
return new Entry < > ( revision , path , EntryType . DIRECTORY , null ) ;
public class SimpleJob { /** * Easily supports the Join . To use the setSimpleJoin , * you must be a size master data appear in the memory of the task . * @ param masterLabels label of master data * @ param masterColumns master column ' s * @ param dataColumns data column ' s * @ param masterPath master data HDFS path * @ return this * @ throws DataFormatException */ public SimpleJob setSimpleJoin ( String [ ] masterLabels , String [ ] masterColumns , String [ ] dataColumns , String masterPath ) throws DataFormatException { } }
String separator = conf . get ( SEPARATOR ) ; return setSimpleJoin ( masterLabels , masterColumns , dataColumns , masterPath , separator , false ) ;
public class SessionUtil { /** * Extracts post back url from the HTML returned by the IDP * @ param html * @ return */ static private String getPostBackUrlFromHTML ( String html ) { } }
Document doc = Jsoup . parse ( html ) ; Elements e1 = doc . getElementsByTag ( "body" ) ; Elements e2 = e1 . get ( 0 ) . getElementsByTag ( "form" ) ; String postBackUrl = e2 . first ( ) . attr ( "action" ) ; return postBackUrl ;
public class SignalRsInner { /** * Regenerate SignalR service access key . PrimaryKey and SecondaryKey cannot be regenerated at the same time . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param resourceName The name of the SignalR resource . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < ServiceResponse < SignalRKeysInner > > regenerateKeyWithServiceResponseAsync ( String resourceGroupName , String resourceName ) { } }
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( resourceName == null ) { throw new IllegalArgumentException ( "Parameter resourceName is required and cannot be null." ) ; } final KeyType keyType = null ; RegenerateKeyParameters parameters = new RegenerateKeyParameters ( ) ; parameters . withKeyType ( null ) ; Observable < Response < ResponseBody > > observable = service . regenerateKey ( this . client . subscriptionId ( ) , resourceGroupName , resourceName , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , parameters , this . client . userAgent ( ) ) ; return client . getAzureClient ( ) . getPostOrDeleteResultAsync ( observable , new TypeToken < SignalRKeysInner > ( ) { } . getType ( ) ) ;
public class ExpressionsRetinaApiImpl { /** * { @ inheritDoc } */ @ Override public List < List < Term > > getSimilarTermsBulk ( PosType posType , Model ... models ) throws JsonProcessingException , ApiException { } }
return getSimilarTermsBulk ( null , posType , models ) ;
public class CommerceAccountUserRelLocalServiceUtil { /** * Returns a range of all the commerce account user rels . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . account . model . impl . CommerceAccountUserRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of commerce account user rels * @ param end the upper bound of the range of commerce account user rels ( not inclusive ) * @ return the range of commerce account user rels */ public static java . util . List < com . liferay . commerce . account . model . CommerceAccountUserRel > getCommerceAccountUserRels ( int start , int end ) { } }
return getService ( ) . getCommerceAccountUserRels ( start , end ) ;
public class CmsJspStandardContextBean {
    /**
     * Initializes the requested container page.
     *
     * Looks the page resource up through the history handler first (so historic
     * versions win over the live resource), then reads it by the current
     * request URI, and finally runs the template-mapper transform on the bean.
     * Idempotent: does nothing once {@code m_page} is set, or when no CmsObject
     * is available.
     *
     * @throws CmsException in case reading the requested resource fails
     */
    public void initPage() throws CmsException {
        if ((m_page == null) && (m_cms != null)) {
            String requestUri = m_cms.getRequestContext().getUri();
            // get the container page itself, checking the history first
            CmsResource pageResource =
                    (CmsResource) CmsHistoryResourceHandler.getHistoryResource(m_request);
            if (pageResource == null) {
                // not a history request: read the live resource for the URI
                pageResource = m_cms.readResource(requestUri);
            }
            m_page = getPage(pageResource);
            // Apply template-mapper rewriting to the container page bean.
            m_page = CmsTemplateMapper.get(m_request).transformContainerpageBean(
                    m_cms, m_page, pageResource.getRootPath());
        }
    }
}
public class VFSUtils { /** * Fix a name ( removes any trailing slash ) * @ param name the name to fix * @ return the fixed name * @ throws IllegalArgumentException for a null name */ public static String fixName ( String name ) { } }
if ( name == null ) { throw MESSAGES . nullArgument ( "name" ) ; } int length = name . length ( ) ; if ( length <= 1 ) { return name ; } if ( name . charAt ( length - 1 ) == '/' ) { return name . substring ( 0 , length - 1 ) ; } return name ;
public class Host2NodesMap { /** * Check if node is already in the map . */ boolean contains ( DatanodeDescriptor node ) { } }
if ( node == null ) { return false ; } String host = node . getHost ( ) ; hostmapLock . readLock ( ) . lock ( ) ; try { DatanodeDescriptor [ ] nodes = map . get ( host ) ; if ( nodes != null ) { for ( DatanodeDescriptor containedNode : nodes ) { if ( node == containedNode ) { return true ; } } } } finally { hostmapLock . readLock ( ) . unlock ( ) ; } return false ;
public class Rule {
    /**
     * Creates a copy of this Rule with a new instance of the generated rule
     * class if present. This prevents sharing instances across threads, which
     * is not supported for performance reasons. Otherwise the generated code
     * would need to be thread safe, adding to the runtime overhead. Instead we
     * buy speed by spending more memory.
     *
     * @param functionRegistry the registered functions of the system
     * @return a copy of this rule with a new instance of its generated code
     */
    public Rule invokableCopy(FunctionRegistry functionRegistry) {
        final Builder builder = toBuilder();
        final Class<? extends GeneratedRule> ruleClass = generatedRuleClass();
        // No generated class means no instance to copy; the builder copy alone suffices.
        if (ruleClass != null) {
            try {
                // noinspection unchecked
                final Set<Constructor> constructors = ReflectionUtils.getConstructors(ruleClass);
                // Generated classes are expected to expose exactly one constructor
                // taking the function registry; getOnlyElement enforces that.
                final Constructor onlyElement = Iterables.getOnlyElement(constructors);
                final GeneratedRule instance = (GeneratedRule) onlyElement.newInstance(functionRegistry);
                builder.generatedRule(instance);
            } catch (IllegalAccessException | InstantiationException | InvocationTargetException e) {
                // Best effort: fall back to a copy without generated code rather than failing.
                LOG.warn("Unable to generate code for rule {}: {}", id(), e);
            }
        }
        return builder.build();
    }
}