signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LBiObjDblPredicateBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */ @ Nonnull public static < T1 , T2 > LBiObjDblPredicateBuilder < T1 , T2 > biObjDblPredicate ( Consumer < LBiObjDblPredicate < T1 , T2 > > consumer ) { } }
return new LBiObjDblPredicateBuilder ( consumer ) ;
public class KnowledgeBaseImpl { /** * globals class types must be re - wired after serialization * @ throws ClassNotFoundException */ private void populateGlobalsMap ( Map < String , String > globs ) throws ClassNotFoundException { } }
this . globals = new HashMap < String , Class < ? > > ( ) ; for ( Map . Entry < String , String > entry : globs . entrySet ( ) ) { addGlobal ( entry . getKey ( ) , this . rootClassLoader . loadClass ( entry . getValue ( ) ) ) ; }
public class MetatagsRecord { /** * Replaces the metatags for a metric . Metatags cannot use any of the reserved tag names . * @ param metatags A key - value pairs of metatags . Cannot be null or empty . * @ param key A unique identifier to be used while indexing this metatags into a schema db . */ public void setMetatags ( Map < String , String > metatags , String key ) { } }
if ( metatags != null ) { TSDBEntity . validateTags ( metatags ) ; _metatags . clear ( ) ; _metatags . putAll ( metatags ) ; _key = key ; }
public class DescribePipelinesResult { /** * An array of descriptions for the specified pipelines . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPipelineDescriptionList ( java . util . Collection ) } or * { @ link # withPipelineDescriptionList ( java . util . Collection ) } if you want to override the existing values . * @ param pipelineDescriptionList * An array of descriptions for the specified pipelines . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribePipelinesResult withPipelineDescriptionList ( PipelineDescription ... pipelineDescriptionList ) { } }
if ( this . pipelineDescriptionList == null ) { setPipelineDescriptionList ( new com . amazonaws . internal . SdkInternalList < PipelineDescription > ( pipelineDescriptionList . length ) ) ; } for ( PipelineDescription ele : pipelineDescriptionList ) { this . pipelineDescriptionList . add ( ele ) ; } return this ;
public class CmsResourceTypeStatResultList { /** * Deletes entries which are older than MAX _ TIME . < p > */ public void deleteOld ( ) { } }
Iterator < CmsResourceTypeStatResult > iterator = m_results . iterator ( ) ; while ( iterator . hasNext ( ) ) { CmsResourceTypeStatResult res = iterator . next ( ) ; if ( isToOld ( res ) ) { iterator . remove ( ) ; } }
public class TypeFactory {
	/**
	 * Registers this factory with the HK2 dependency-injection configuration
	 * (org.glassfish.hk2.utilities.Binder contract).
	 */
	@Override
	public void bind(DynamicConfiguration config) {
		// Bind this factory as the singleton-scoped provider for `type` ...
		Injections.addBinding(Injections.newFactoryBinder(this).to(type).in(Singleton.class), config);
		// ... and additionally expose it under the ValueFactoryProvider contract.
		Injections.addBinding(Injections.newBinder(this).to(ValueFactoryProvider.class), config);
	}
}
public class Configurer { /** * Get the node at the following path . * @ param path The node path . * @ return The node found . * @ throws LionEngineException If node not found . */ private Xml getNode ( String ... path ) { } }
Xml node = root ; for ( final String element : path ) { try { node = node . getChild ( element ) ; } catch ( final LionEngineException exception ) { throw new LionEngineException ( exception , media ) ; } } return node ;
public class ServletContextInitParameterPhrase { /** * { @ inheritDoc } */ public Object evaluate ( TaskRequest req , TaskResponse res ) { } }
HttpServletRequest hreq = ( HttpServletRequest ) source . evaluate ( req , res ) ; String parameterName = ( String ) parameter_name . evaluate ( req , res ) ; return hreq . getSession ( ) . getServletContext ( ) . getInitParameter ( parameterName ) ;
public class CorporationApi { /** * Track corporation members Returns additional information about a * corporation & # 39 ; s members which helps tracking their activities - - - This * route is cached for up to 3600 seconds - - - Requires one of the following * EVE corporation role ( s ) : Director SSO Scope : * esi - corporations . track _ members . v1 * @ param corporationId * An EVE corporation ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param token * Access token to use if unable to set a header ( optional ) * @ return List & lt ; CorporationMemberTrackingResponse & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public List < CorporationMemberTrackingResponse > getCorporationsCorporationIdMembertracking ( Integer corporationId , String datasource , String ifNoneMatch , String token ) throws ApiException { } }
ApiResponse < List < CorporationMemberTrackingResponse > > resp = getCorporationsCorporationIdMembertrackingWithHttpInfo ( corporationId , datasource , ifNoneMatch , token ) ; return resp . getData ( ) ;
public class Contract { /** * syntactic sugar */ public Reference addAuthority ( ) { } }
Reference t = new Reference ( ) ; if ( this . authority == null ) this . authority = new ArrayList < Reference > ( ) ; this . authority . add ( t ) ; return t ;
public class RedisQueue {
	/**
	 * {@inheritDoc}
	 *
	 * Takes one message from the queue via a server-side Lua script, passing the
	 * current timestamp so the script can record the take time.
	 *
	 * @throws QueueException.EphemeralIsFull if the ephemeral storage is full
	 */
	@Override
	public IQueueMessage<ID, DATA> take() throws QueueException.EphemeralIsFull {
		// Reject up-front if the ephemeral store has reached its configured cap
		// (a cap of <= 0 means unlimited).
		if (!isEphemeralDisabled()) {
			int ephemeralMaxSize = getEphemeralMaxSize();
			if (ephemeralMaxSize > 0 && ephemeralSize() >= ephemeralMaxSize) {
				throw new QueueException.EphemeralIsFull(ephemeralMaxSize);
			}
		}
		// try-with-resources returns the Jedis connection to the pool on exit.
		try (Jedis jedis = getJedisConnector().getJedis()) {
			long now = System.currentTimeMillis();
			// Run the "take" script with 0 keys; the timestamp is its only argument.
			Object response = jedis.eval(getScriptTake(), 0, String.valueOf(now));
			if (response == null) {
				return null;
			}
			// The script may answer with raw bytes or a string; normalize to bytes
			// (UTF-8) before deserializing into a queue message.
			return deserialize(response instanceof byte[] ? (byte[]) response : response.toString().getBytes(QueueUtils.UTF8));
		}
	}
}
public class CreateTopicRuleRequestMarshaller {
	/**
	 * Marshall the given parameter object: writes the rule name, topic rule
	 * payload and tags through the protocol marshaller using their bindings.
	 *
	 * @param createTopicRuleRequest the request to marshall; must not be null
	 * @param protocolMarshaller     sink that serializes each bound field
	 */
	public void marshall(CreateTopicRuleRequest createTopicRuleRequest, ProtocolMarshaller protocolMarshaller) {
		if (createTopicRuleRequest == null) {
			throw new SdkClientException("Invalid argument passed to marshall(...)");
		}
		try {
			protocolMarshaller.marshall(createTopicRuleRequest.getRuleName(), RULENAME_BINDING);
			protocolMarshaller.marshall(createTopicRuleRequest.getTopicRulePayload(), TOPICRULEPAYLOAD_BINDING);
			protocolMarshaller.marshall(createTopicRuleRequest.getTags(), TAGS_BINDING);
		} catch (Exception e) {
			// Any marshalling failure is wrapped in an SDK exception with the cause kept.
			throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
		}
	}
}
public class AnnotationsClassLoader {
	/**
	 * Find the resource with the given name, and return an input stream
	 * that can be used for reading it. The search order is as described
	 * for <code>getResource()</code>, after checking to see if the resource
	 * data has been previously cached. If the resource cannot be found,
	 * return <code>null</code>.
	 *
	 * Lookup order: (0) local cache, (1) parent loader when delegation is on,
	 * (2) local repositories, (3) parent loader when delegation is off,
	 * (4) not found.
	 *
	 * @param name Name of the resource to return an input stream for
	 */
	public InputStream getResourceAsStream(String name) {
		if (log.isDebugEnabled())
			log.debug("getResourceAsStream(" + name + ")");
		InputStream stream = null;
		// (0) Check for a cached copy of this resource
		stream = findLoadedResource(name);
		if (stream != null) {
			if (log.isDebugEnabled())
				log.debug(" --> Returning stream from cache");
			return (stream);
		}
		// (1) Delegate to parent if requested (falling back to the system
		// loader when no parent is configured)
		if (delegate) {
			if (log.isDebugEnabled())
				log.debug(" Delegating to parent classloader " + parent);
			ClassLoader loader = parent;
			if (loader == null)
				loader = system;
			stream = loader.getResourceAsStream(name);
			if (stream != null) {
				// FIXME - cache???
				if (log.isDebugEnabled())
					log.debug(" --> Returning stream from parent");
				return (stream);
			}
		}
		// (2) Search local repositories
		if (log.isDebugEnabled())
			log.debug(" Searching local repositories");
		URL url = findResource(name);
		if (url != null) {
			// FIXME - cache???
			if (log.isDebugEnabled())
				log.debug(" --> Returning stream from local");
			// findResource may have cached the stream as a side effect.
			stream = findLoadedResource(name);
			try {
				// Only open the URL directly when external repositories are in
				// play and the cache still has nothing.
				if (hasExternalRepositories && (stream == null))
					stream = url.openStream();
			} catch (IOException e) {
				; // Ignore — deliberate best-effort; fall through to (3)/(4)
			}
			if (stream != null)
				return (stream);
		}
		// (3) Delegate to parent unconditionally (only reached when delegation
		// was off, so the parent has not been consulted yet)
		if (!delegate) {
			if (log.isDebugEnabled())
				log.debug(" Delegating to parent classloader unconditionally " + parent);
			ClassLoader loader = parent;
			if (loader == null)
				loader = system;
			stream = loader.getResourceAsStream(name);
			if (stream != null) {
				// FIXME - cache???
				if (log.isDebugEnabled())
					log.debug(" --> Returning stream from parent");
				return (stream);
			}
		}
		// (4) Resource was not found
		if (log.isDebugEnabled())
			log.debug(" --> Resource not found, returning null");
		return (null);
	}
}
public class JobsResource {
	/**
	 * Create a job given the definition in {@code job}.
	 *
	 * @param job      The job to create.
	 * @param username The user creating the job.
	 * @return The response: OK on success, otherwise a 400 with the validation
	 *         errors or a duplicate-job status.
	 */
	@POST
	@Produces(APPLICATION_JSON)
	@Timed
	@ExceptionMetered
	public CreateJobResponse post(@Valid final Job job, @RequestUser final String username) {
		// Stamp the creator and creation time onto a copy of the incoming job.
		final Job.Builder clone = job.toBuilder()
				.setCreatingUser(username)
				.setCreated(clock.now().getMillis())
				// If the job had a hash coming in, preserve it
				.setHash(job.getId().getHash());
		final Job actualJob = clone.build();
		final Collection<String> errors = jobValidator.validate(actualJob);
		final String jobIdString = actualJob.getId().toString();
		// Validation failures become a 400 carrying every error message.
		if (!errors.isEmpty()) {
			throw badRequest(new CreateJobResponse(INVALID_JOB_DEFINITION, ImmutableList.copyOf(errors), jobIdString));
		}
		try {
			model.addJob(actualJob);
		} catch (JobExistsException e) {
			// Duplicate id: report JOB_ALREADY_EXISTS rather than a server error.
			throw badRequest(new CreateJobResponse(JOB_ALREADY_EXISTS, ImmutableList.<String>of(), jobIdString));
		}
		log.info("created job: {}", actualJob);
		return new CreateJobResponse(CreateJobResponse.Status.OK, ImmutableList.<String>of(), jobIdString);
	}
}
public class FileDisk { /** * Creates a new { @ code FileDisk } of the specified size . The * { @ code FileDisk } returned by this method will be writable . * @ param file the file to hold the { @ code FileDisk } contents * @ param size the size of the new { @ code FileDisk } * @ return the created { @ code FileDisk } instance * @ throws IOException on error creating the { @ code FileDisk } * @ throws IllegalArgumentException if size is & lt ; 0 */ public static FileDisk create ( File file , long size ) throws IOException , IllegalArgumentException { } }
if ( size < 0 ) { throw new IllegalArgumentException ( "size must be >= 0" ) ; } try { final RandomAccessFile raf = new RandomAccessFile ( file , "rw" ) ; // NOI18N raf . setLength ( size ) ; return new FileDisk ( raf , false ) ; } catch ( FileNotFoundException ex ) { throw new IOException ( ex ) ; }
public class ContractAnalyzer { /** * Returns the MethodHandle objects matching the specified criteria . * @ param kind the kind of the handles * @ param name the target method name * @ param desc the target method descriptor * @ param extraCount the number of extra parameters in the contract * method * @ return a list containing the requested handles */ @ Requires ( { } }
"kind != null" , "name != null" , "desc != null" , "extraCount >= 0" } ) @ Ensures ( { "result != null" , "!result.contains(null)" } ) List < MethodContractHandle > getMethodHandles ( ContractKind kind , String name , String desc , int extraCount ) { ArrayList < MethodContractHandle > candidates = methodHandles . get ( name ) ; if ( candidates == null ) { return Collections . emptyList ( ) ; } ArrayList < MethodContractHandle > matched = new ArrayList < MethodContractHandle > ( ) ; for ( MethodContractHandle h : candidates ) { if ( kind . equals ( h . getKind ( ) ) && descArgumentsMatch ( desc , h . getContractMethod ( ) . desc , extraCount ) ) { matched . add ( h ) ; } } return matched ;
public class ProposalMarketplaceInfo {
	/**
	 * Sets the negotiationStatus value for this ProposalMarketplaceInfo.
	 *
	 * @param negotiationStatus The negotiation status of the {@link Proposal}.
	 *                          This attribute is read-only (server-side); the
	 *                          setter exists for the SOAP (de)serialization layer.
	 */
	public void setNegotiationStatus(com.google.api.ads.admanager.axis.v201902.NegotiationStatus negotiationStatus) {
		this.negotiationStatus = negotiationStatus;
	}
}
public class JOGLTypeConversions {
	/**
	 * Convert types from GL constants.
	 *
	 * @param type The GL constant.
	 * @return The corresponding {@link JCGLScalarType} value.
	 * @throws UnreachableCodeException for any constant outside the supported set
	 */
	public static JCGLScalarType scalarTypeFromGL(final int type) {
		switch (type) {
			case GL.GL_HALF_FLOAT:
				return JCGLScalarType.TYPE_HALF_FLOAT;
			case GL.GL_BYTE:
				return JCGLScalarType.TYPE_BYTE;
			case GL.GL_UNSIGNED_BYTE:
				return JCGLScalarType.TYPE_UNSIGNED_BYTE;
			case GL.GL_SHORT:
				return JCGLScalarType.TYPE_SHORT;
			case GL.GL_UNSIGNED_SHORT:
				return JCGLScalarType.TYPE_UNSIGNED_SHORT;
			// GL_INT lives on the GL2ES2 interface rather than GL.
			case GL2ES2.GL_INT:
				return JCGLScalarType.TYPE_INT;
			case GL.GL_UNSIGNED_INT:
				return JCGLScalarType.TYPE_UNSIGNED_INT;
			case GL.GL_FLOAT:
				return JCGLScalarType.TYPE_FLOAT;
			default:
				throw new UnreachableCodeException();
		}
	}
}
public class PropertiesUtil { /** * Gets the named property as a boolean value . * @ param name the name of the property to look up * @ param defaultValue the default value to use if the property is undefined * @ return the boolean value of the property or { @ code defaultValue } if undefined . */ public boolean getBooleanProperty ( final String name , final boolean defaultValue ) { } }
final String prop = getStringProperty ( name ) ; return prop == null ? defaultValue : "true" . equalsIgnoreCase ( prop ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIfcPermeableCoveringOperationEnum ( ) { } }
if ( ifcPermeableCoveringOperationEnumEEnum == null ) { ifcPermeableCoveringOperationEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 865 ) ; } return ifcPermeableCoveringOperationEnumEEnum ;
public class BM25 { /** * Returns a relevance score between a term and a document based on a corpus . * @ param freq the frequency of searching term in the document to rank . * @ param docSize the size of document to rank . * @ param avgDocSize the average size of documents in the corpus . * @ param N the number of documents in the corpus . * @ param n the number of documents containing the given term in the corpus ; */ public double score ( double freq , int docSize , double avgDocSize , long N , long n ) { } }
if ( freq <= 0 ) return 0.0 ; double tf = freq * ( k1 + 1 ) / ( freq + k1 * ( 1 - b + b * docSize / avgDocSize ) ) ; double idf = Math . log ( ( N - n + 0.5 ) / ( n + 0.5 ) ) ; return ( tf + delta ) * idf ;
public class AstApply {
	// Applies `fun` to each column of `fr` independently and assembles the
	// per-column results into a single one-row-per-result Frame.
	private ValFrame colwise(Env env, Env.StackHelp stk, Frame fr, AstPrimitive fun) {
		// Break each column into its own Frame, then execute the function passing
		// the 1 argument. All columns are independent, and this loop should be
		// parallelized over each column.
		Vec vecs[] = fr.vecs();
		Val vals[] = new Val[vecs.length];
		AstRoot[] asts = new AstRoot[]{fun, null};
		for (int i = 0; i < vecs.length; i++) {
			asts[1] = new AstFrame(new Frame(new String[]{fr._names[i]}, new Vec[]{vecs[i]}));
			// Inner stack frame is scoped per column so temporaries are reclaimed.
			try (Env.StackHelp stk_inner = env.stk()) {
				vals[i] = fun.apply(env, stk_inner, asts);
			}
		}
		// All the resulting Vals must be the same scalar type (and if ValFrames,
		// the columns must be the same count and type). Build a Frame result with
		// 1 row column per applied function result (per column), and as many rows
		// as there are columns in the returned Frames. The first result's type
		// dictates how the rest are interpreted.
		Val v0 = vals[0];
		Vec ovecs[] = new Vec[vecs.length];
		switch (v0.type()) {
		case Val.NUM:
			for (int i = 0; i < vecs.length; i++)
				ovecs[i] = Vec.makeCon(vals[i].getNum(), 1L); // Since the zero column is a number, all must be numbers
			break;
		case Val.FRM:
			long nrows = v0.getFrame().numRows();
			for (int i = 0; i < vecs.length; i++) {
				Frame res = vals[i].getFrame(); // Since the zero column is a frame, all must be frames
				if (res.numCols() != 1)
					throw new IllegalArgumentException("apply result Frames must have one column, found " + res.numCols() + " cols");
				if (res.numRows() != nrows)
					throw new IllegalArgumentException("apply result Frames must have all the same rows, found " + nrows + " rows and " + res.numRows());
				ovecs[i] = res.vec(0);
			}
			break;
		case Val.NUMS:
			// Only the first element of a NUMS result is kept per column.
			for (int i = 0; i < vecs.length; i++)
				ovecs[i] = Vec.makeCon(vals[i].getNums()[0], 1L);
			break;
		case Val.STRS:
			throw H2O.unimpl();
		case Val.FUN:
			throw water.H2O.unimpl();
		case Val.STR:
			throw water.H2O.unimpl();
		default:
			throw water.H2O.unimpl();
		}
		return new ValFrame(new Frame(fr._names, ovecs));
	}
}
public class BSHPrimarySuffix {
	/**
	 * Property access.
	 * Must handle toLHS case: when assigning, return an LHS wrapper instead of
	 * resolving the property value.
	 *
	 * @param toLHS       true when the property is the target of an assignment
	 * @param obj         the object whose property is accessed
	 * @param callstack   interpreter call stack (for error context)
	 * @param interpreter the active interpreter
	 * @throws EvalError on void/primitive targets, non-String property
	 *                   expressions, or unknown properties
	 */
	private Object doProperty(boolean toLHS, Object obj, CallStack callstack, Interpreter interpreter) throws EvalError {
		// Guard against meaningless targets before evaluating the property name.
		if (obj == Primitive.VOID)
			throw new EvalError("Attempt to access property on undefined variable or class name", this, callstack);
		if (obj instanceof Primitive)
			throw new EvalError("Attempt to access property on a primitive", this, callstack);
		// The first child node evaluates to the property name.
		Object value = ((SimpleNode) jjtGetChild(0)).eval(callstack, interpreter);
		if (!(value instanceof String))
			throw new EvalError("Property expression must be a String or identifier.", this, callstack);
		// Assignment target: defer resolution to the LHS wrapper.
		if (toLHS)
			return new LHS(obj, (String) value);
		try {
			Object val = Reflect.getObjectProperty(obj, (String) value);
			// null maps to Primitive.NULL; wrapped primitives are unwrapped.
			return null == val ? Primitive.NULL : Primitive.unwrap(val);
		} catch (ReflectError e) {
			throw new EvalError("No such property: " + value, this, callstack, e);
		}
	}
}
public class Searcher { /** * Returns a text view with a given match . * @ param webElements the list of views * @ param match the match of the view to return * @ return the view with a given match */ private WebElement getViewFromList ( List < WebElement > webElements , int match ) { } }
WebElement webElementToReturn = null ; if ( webElements . size ( ) >= match ) { try { webElementToReturn = webElements . get ( -- match ) ; } catch ( Exception ignored ) { } } if ( webElementToReturn != null ) webElements . clear ( ) ; return webElementToReturn ;
public class BusLayerConstants { /** * Replies if the bus stops names should be drawn or not . * @ return < code > true < / code > if the bus stops are drawable ; * otherwise < code > false < / code > */ @ Pure public static boolean isBusStopNamesDrawable ( ) { } }
final Preferences prefs = Preferences . userNodeForPackage ( BusLayerConstants . class ) ; if ( prefs != null ) { return prefs . getBoolean ( "DRAW_BUS_STOPS_NAMES" , DEFAULT_BUS_STOP_NAMES_DRAWING ) ; // $ NON - NLS - 1 $ } return DEFAULT_BUS_STOP_NAMES_DRAWING ;
public class RemoteSender { /** * send message to remote asynchronously . * @ param nodeId the destination node id . */ private void sendToRemote ( String nodeId ) { } }
unitRequest . getContext ( ) . setDestinationNodeId ( nodeId ) ; /* unitRequest . getContext ( ) . setSourceNodeId ( LocalNodeManager . LOCAL _ NODE _ ID ) ; let the node instance to fill this source node id property */ LocalNodeManager . sendLoadBalanced ( unitRequest , callback ) ;
public class MetadataManager { /** * Try to build an default PBKey for convenience PB create method . * @ return PBKey or < code > null < / code > if default key was not declared in * metadata */ private PBKey buildDefaultKey ( ) { } }
List descriptors = connectionRepository ( ) . getAllDescriptor ( ) ; JdbcConnectionDescriptor descriptor ; PBKey result = null ; for ( Iterator iterator = descriptors . iterator ( ) ; iterator . hasNext ( ) ; ) { descriptor = ( JdbcConnectionDescriptor ) iterator . next ( ) ; if ( descriptor . isDefaultConnection ( ) ) { if ( result != null ) { log . error ( "Found additional connection descriptor with enabled 'default-connection' " + descriptor . getPBKey ( ) + ". This is NOT allowed. Will use the first found descriptor " + result + " as default connection" ) ; } else { result = descriptor . getPBKey ( ) ; } } } if ( result == null ) { log . info ( "No 'default-connection' attribute set in jdbc-connection-descriptors," + " thus it's currently not possible to use 'defaultPersistenceBroker()' " + " convenience method to lookup PersistenceBroker instances. But it's possible" + " to enable this at runtime using 'setDefaultKey' method." ) ; } return result ;
public class Validators { /** * The input object is not null . if yes , the check passes * @ param msg error message after verification failed * @ return Validation */ public static < T > Validation < T > notNull ( String msg ) { } }
return SimpleValidation . from ( Objects :: nonNull , msg ) ;
public class PermDAO { /** * Add description to this permission * @ param trans * @ param ns * @ param type * @ param instance * @ param action * @ param description * @ return */ public Result < Void > addDescription ( AuthzTrans trans , String ns , String type , String instance , String action , String description ) { } }
try { getSession ( trans ) . execute ( UPDATE_SP + TABLE + " SET description = '" + description + "' WHERE ns = '" + ns + "' AND type = '" + type + "'" + "AND instance = '" + instance + "' AND action = '" + action + "';" ) ; } catch ( DriverException | APIException | IOException e ) { reportPerhapsReset ( trans , e ) ; return Result . err ( Result . ERR_Backend , CassAccess . ERR_ACCESS_MSG ) ; } Data data = new Data ( ) ; data . ns = ns ; data . type = type ; data . instance = instance ; data . action = action ; wasModified ( trans , CRUD . update , data , "Added description " + description + " to permission " + data . encode ( ) , null ) ; return Result . ok ( ) ;
public class BoxApiBookmark { /** * Gets a request that renames a bookmark * @ param id id of bookmark to rename * @ param newName id of bookmark to retrieve info on * @ return request to rename a bookmark */ public BoxRequestsBookmark . UpdateBookmark getRenameRequest ( String id , String newName ) { } }
BoxRequestsBookmark . UpdateBookmark request = new BoxRequestsBookmark . UpdateBookmark ( id , getBookmarkInfoUrl ( id ) , mSession ) ; request . setName ( newName ) ; return request ;
public class DrizzleStatement {
	/**
	 * Executes a select query.
	 *
	 * @param query the query to send to the server
	 * @return a result set
	 * @throws SQLException if something went wrong
	 */
	public ResultSet executeQuery(final String query) throws SQLException {
		// Query timer brackets the whole round-trip; stopped in finally so it
		// fires even when the query fails.
		startTimer();
		try {
			// Release the previous result before issuing a new query.
			if (queryResult != null) {
				queryResult.close();
			}
			final Query queryToSend = queryFactory.createQuery(query);
			queryResult = protocol.executeQuery(queryToSend);
			warningsCleared = false;
			return new DrizzleResultSet(queryResult, this, getProtocol());
		} catch (QueryException e) {
			// Translate driver-level failures into the JDBC exception hierarchy.
			throw SQLExceptionMapper.get(e);
		} finally {
			stopTimer();
		}
	}
}
public class CommerceVirtualOrderItemLocalServiceBaseImpl {
	/**
	 * Returns a range of all the commerce virtual order items.
	 *
	 * Useful when paginating results. Returns a maximum of <code>end - start</code>
	 * instances. <code>start</code> and <code>end</code> are not primary keys; they
	 * are indexes in the result set, so <code>0</code> refers to the first result.
	 * Setting both to {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS}
	 * returns the full result set.
	 *
	 * @param start the lower bound of the range of commerce virtual order items
	 * @param end the upper bound of the range of commerce virtual order items (not inclusive)
	 * @return the range of commerce virtual order items
	 */
	@Override
	public List<CommerceVirtualOrderItem> getCommerceVirtualOrderItems(int start, int end) {
		// Pure delegation to the persistence layer.
		return commerceVirtualOrderItemPersistence.findAll(start, end);
	}
}
public class LoginDialog {
	/**
	 * Sets Administrator.APIA/M if success, throws Exception if fails.
	 *
	 * Builds the base URL from the connection parameters, connects via REST,
	 * warns on stderr when the server version is outside the client's
	 * compatibility list, then installs the SOAP stubs.
	 */
	public void tryLogin(String protocol, String host, int port, String context, String user, String pass) throws Exception {
		try {
			logger.info("Logging in...");
			// get a FedoraClient
			String baseURL = protocol + "://" + host + ":" + port + "/" + context;
			FedoraClient fc = new FedoraClient(baseURL, user, pass);
			// attempt to connect via REST
			String serverVersion = fc.getServerVersion();
			// ensure client is compatible with server, warn if not
			List<String> compatibleVersions = FedoraClient.getCompatibleServerVersions();
			if (!compatibleVersions.contains(serverVersion)) {
				// Build a human-readable list: "version A", "versions A and B",
				// or "versions A, B, and C" (Oxford comma for 3+ entries).
				StringBuffer endText = new StringBuffer();
				if (compatibleVersions.size() == 1) {
					// version A
					endText.append("version " + compatibleVersions.get(0));
				} else {
					// versions A and B / versions A, B, and C
					endText.append("versions ");
					for (int i = 0; i < compatibleVersions.size(); i++) {
						if (i > 0) {
							if (i == compatibleVersions.size() - 1) {
								if (i > 1) {
									endText.append(",");
								}
								endText.append(" and ");
							} else {
								endText.append(", ");
							}
						}
						endText.append(compatibleVersions.get(i));
					}
				}
				System.err.println("WARNING: Server version is " + serverVersion + ". This client is " + "only designed to work with " + endText.toString());
			}
			// set SOAP stubs for Administrator
			Administrator.APIA = fc.getAPIAMTOM();
			Administrator.APIM = fc.getAPIMMTOM();
		} catch (Exception e) {
			// NOTE(review): e.getMessage() may be null here, which would NPE on
			// indexOf before the null check below ever runs — TODO confirm/guard.
			if (e.getMessage().indexOf("Unauthorized") != -1 || e.getMessage().indexOf("Unrecognized") != -1) {
				throw new IOException("Bad username or password.");
			} else {
				if (e.getMessage() != null) {
					throw new IOException(e.getClass().getName() + ": " + e.getMessage());
				} else {
					throw new IOException(e.getClass().getName());
				}
			}
		}
	}
}
public class ObjectRule {
	/**
	 * Applies this schema rule to take the required code generation steps.
	 *
	 * When this rule is applied for schemas of type object, the properties of
	 * the schema are used to generate a new Java class and determine its
	 * characteristics. See other implementers of {@link Rule} for details.
	 */
	@Override
	public JType apply(String nodeName, JsonNode node, JsonNode parent, JPackage _package, Schema schema) {
		// A primitive or final super type cannot be extended — reuse it as-is.
		JType superType = reflectionHelper.getSuperType(nodeName, node, _package, schema);
		if (superType.isPrimitive() || reflectionHelper.isFinal(superType)) {
			return superType;
		}
		JDefinedClass jclass;
		try {
			jclass = createClass(nodeName, node, _package);
		} catch (ClassAlreadyExistsException e) {
			// A class by this name was already generated; reuse it.
			return e.getExistingClass();
		}
		jclass._extends((JClass) superType);
		schema.setJavaTypeIfEmpty(jclass);
		// Delegate each recognized schema keyword to its dedicated sub-rule;
		// the order below matters (e.g. builders before properties).
		if (node.has("title")) {
			ruleFactory.getTitleRule().apply(nodeName, node.get("title"), node, jclass, schema);
		}
		if (node.has("description")) {
			ruleFactory.getDescriptionRule().apply(nodeName, node.get("description"), node, jclass, schema);
		}
		// Creates the class definition for the builder
		if (ruleFactory.getGenerationConfig().isGenerateBuilders() && ruleFactory.getGenerationConfig().isUseInnerClassBuilders()) {
			ruleFactory.getBuilderRule().apply(nodeName, node, parent, jclass, schema);
		}
		ruleFactory.getPropertiesRule().apply(nodeName, node.get("properties"), node, jclass, schema);
		if (node.has("javaInterfaces")) {
			addInterfaces(jclass, node.get("javaInterfaces"));
		}
		ruleFactory.getAdditionalPropertiesRule().apply(nodeName, node.get("additionalProperties"), node, jclass, schema);
		ruleFactory.getDynamicPropertiesRule().apply(nodeName, node.get("properties"), node, jclass, schema);
		if (node.has("required")) {
			ruleFactory.getRequiredArrayRule().apply(nodeName, node.get("required"), node, jclass, schema);
		}
		// Optional boilerplate, gated by the generation configuration.
		if (ruleFactory.getGenerationConfig().isIncludeToString()) {
			addToString(jclass);
		}
		if (ruleFactory.getGenerationConfig().isIncludeHashcodeAndEquals()) {
			addHashCode(jclass, node);
			addEquals(jclass, node);
		}
		if (ruleFactory.getGenerationConfig().isParcelable()) {
			addParcelSupport(jclass);
		}
		if (ruleFactory.getGenerationConfig().isIncludeConstructors()) {
			ruleFactory.getConstructorRule().apply(nodeName, node, parent, jclass, schema);
		}
		if (ruleFactory.getGenerationConfig().isSerializable()) {
			SerializableHelper.addSerializableSupport(jclass);
		}
		return jclass;
	}
}
public class MethodInterceptorFactory { /** * { @ inheritDoc } */ public Interceptor create ( final InterceptorFactoryContext context ) { } }
final Map < Object , Object > map = context . getContextData ( ) ; if ( map . containsKey ( this ) ) { return ( Interceptor ) map . get ( this ) ; } else { final MethodInterceptor interceptor = new MethodInterceptor ( instanceFactory . createInstance ( context ) , interceptorMethod , changeMethod ) ; map . put ( this , interceptor ) ; return interceptor ; }
public class Cluster { /** * The nodes in the cluster . * @ param clusterNodes * The nodes in the cluster . */ public void setClusterNodes ( java . util . Collection < ClusterNode > clusterNodes ) { } }
if ( clusterNodes == null ) { this . clusterNodes = null ; return ; } this . clusterNodes = new com . amazonaws . internal . SdkInternalList < ClusterNode > ( clusterNodes ) ;
public class AbstractProcessMojo {
	/**
	 * Builds a fixed-size executor from a thread-count spec: a plain integer
	 * ("4"), or an integer multiplied by the available processors when suffixed
	 * with "C" ("2C"). Worker threads are named "javassist-processor-N" and
	 * inherit the caller's context class loader.
	 */
	/* package private */
	static ExecutorService createExecutorService(final String threads) {
		final int threadCount;
		if (threads.endsWith("C")) {
			final String factor = threads.substring(0, threads.length() - 1);
			threadCount = Integer.parseInt(factor) * Runtime.getRuntime().availableProcessors();
		} else {
			threadCount = Integer.parseInt(threads);
		}
		final ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
		final AtomicInteger nextIndex = new AtomicInteger(1);
		final ThreadFactory factory = r -> {
			final Thread worker = new Thread(r);
			worker.setName("javassist-processor-" + nextIndex.getAndIncrement());
			worker.setContextClassLoader(contextLoader);
			return worker;
		};
		return Executors.newFixedThreadPool(threadCount, factory);
	}
}
public class MergeUsers { /** * Get query parameters * @ return Values of query parameters ( name of parameter : value of the parameter ) */ @ Override public Map < String , Object > getQueryParameters ( ) { } }
HashMap < String , Object > params = new HashMap < String , Object > ( ) ; if ( this . cascadeCreate != null ) { params . put ( "cascadeCreate" , this . cascadeCreate . toString ( ) ) ; } return params ;
public class BinaryHashPartition { /** * Inserts the given object into the current buffer . This method returns a pointer that * can be used to address the written record in this partition , if it is in - memory . * The returned pointers have no expressiveness in the case where the partition is spilled . * @ param record The object to be written to the partition . * @ return A pointer to the object in the partition , or < code > - 1 < / code > , if the partition is * spilled . * @ throws IOException Thrown , when this is a spilled partition and the write failed . */ final int insertIntoBuildBuffer ( BinaryRow record ) throws IOException { } }
this . buildSideRecordCounter ++ ; if ( isInMemory ( ) ) { final long pointer = this . buildSideWriteBuffer . getCurrentPointer ( ) ; int skip = this . buildSideSerializer . serializeToPages ( record , this . buildSideWriteBuffer ) ; if ( isInMemory ( ) ) { long ret = pointer + skip ; if ( ret > Integer . MAX_VALUE ) { throw new RuntimeException ( "Too more data in this partition: " + ret ) ; } return ( int ) ret ; } else { return - 1 ; } } else { this . buildSideSerializer . serializeToPages ( record , this . buildSideWriteBuffer ) ; return - 1 ; }
public class PythonDataStream { /** * A thin wrapper layer over { @ link DataStream # writeToSocket ( String , int , org . apache . flink . api . common . serialization . SerializationSchema ) } . * @ param host host of the socket * @ param port port of the socket * @ param schema schema for serialization */ @ PublicEvolving public void write_to_socket ( String host , Integer port , SerializationSchema < PyObject > schema ) throws IOException { } }
// Wrap the Python-side schema so the underlying Java socket sink can serialize PyObject records.
stream . writeToSocket ( host , port , new PythonSerializationSchema ( schema ) ) ;
public class Mediawiki { /** * main instance - this is the non - static version of main - it will run as a * static main would but return it ' s exitCode to the static main the static * main will then decide whether to do a System . exit ( exitCode ) or not . * @ param args * - command line arguments * @ return - the exit Code to be used by the static main program */ protected int maininstance ( String [ ] args ) { } }
parser = new CmdLineParser ( this ) ; try { parser . parseArgument ( args ) ; if ( debug ) showVersion ( ) ; if ( this . showVersion ) { showVersion ( ) ; } else if ( this . showHelp ) { showHelp ( ) ; } else { // FIXME - do something // implement actions System . err . println ( "Commandline interface is not functional in " + VERSION + " yet" ) ; exitCode = 1 ; // exitCode = 0; } } catch ( CmdLineException e ) { // handling of wrong arguments usage ( e . getMessage ( ) ) ; } catch ( Exception e ) { handle ( e ) ; exitCode = 1 ; } return exitCode ;
public class DeepBlockRewriter { /** * the base method and doesn ' t know that it will be run automatically ) */ private boolean forbidUseOfSuperInFixtureMethod ( MethodCallExpression expr ) { } }
Method currMethod = resources . getCurrentMethod ( ) ; Expression target = expr . getObjectExpression ( ) ; if ( currMethod instanceof FixtureMethod && target instanceof VariableExpression && ( ( VariableExpression ) target ) . isSuperExpression ( ) && currMethod . getName ( ) . equals ( expr . getMethodAsString ( ) ) ) { resources . getErrorReporter ( ) . error ( expr , "A base class fixture method should not be called explicitly " + "because it is always invoked automatically by the framework" ) ; return true ; } return false ;
public class PropClient { /** * Entry point for running the PropClient . * An IP host or port identifier has to be supplied to specify the endpoint for the * KNX network access . < br > * To show the usage message of this tool on the console , supply the command line * option - help ( or - h ) . < br > * Command line options are treated case sensitive . Available options for the property * client : * < ul > * < li > < code > - help - h < / code > show help message < / li > * < li > < code > - version < / code > show tool / library version and exit < / li > * < li > < code > - verbose - v < / code > enable verbose status output < / li > * < li > < code > - local - l < / code > local device management < / li > * < li > < code > - remote - r < / code > < i > KNX addr < / i > & nbsp ; remote property service < / li > * < li > < code > - definitions - d < / code > < i > file < / i > & nbsp ; use property definition file < / li > * < li > < code > - localhost < / code > < i > id < / i > & nbsp ; local IP / host name < / li > * < li > < code > - localport < / code > < i > number < / i > & nbsp ; local UDP port ( default system * assigned ) < / li > * < li > < code > - port - p < / code > < i > number < / i > & nbsp ; UDP port on host ( default 3671 ) < / li > * < li > < code > - nat - n < / code > enable Network Address Translation < / li > * < li > < code > - serial - s < / code > use FT1.2 serial communication < / li > * < / ul > * For local device management these options are available : * < ul > * < li > < code > - emulatewriteenable - e < / code > check write - enable of a property < / li > * < / ul > * For remote property service these options are available : * < ul > * < li > < code > - routing < / code > use KNXnet / IP routing < / li > * < li > < code > - medium - m < / code > < i > id < / i > & nbsp ; KNX medium [ tp0 | tp1 | p110 | p132 | rf ] * ( defaults to tp1 ) < / li > * < li > < code > - connect - c < / code > connection oriented mode < / li > * < li > < 
code > - authorize - a < / code > < i > key < / i > & nbsp ; authorize key to access KNX * device < / li > * < / ul > * @ param args command line options for property client */ public static void main ( String [ ] args ) { } }
try { // read the command line options and run the client final Map options = new HashMap ( ) ; if ( parseOptions ( args , options ) ) new PropClient ( ) . run ( options ) ; } catch ( final Throwable t ) { if ( t . getMessage ( ) != null ) System . out . println ( t . getMessage ( ) ) ; }
public class SimpleResponseManager { protected void doWrite ( String text , String contentType ) { } }
// Validate the required arguments, then delegate to the three-argument overload,
// deriving the response encoding from the current settings.
assertArgumentNotNull ( "text" , text ) ; assertArgumentNotNull ( "contentType" , contentType ) ; doWrite ( text , contentType , deriveResponseEncoding ( ) ) ;
public class CallCredentialsHelper { /** * Creates a new call credential with the given username and password for basic auth . * < b > Note : < / b > This method uses experimental grpc - java - API features . * @ param username The username to use . * @ param password The password to use . * @ return The newly created basic auth credentials . */ public static CallCredentials basicAuth ( final String username , final String password ) { } }
final Metadata extraHeaders = new Metadata ( ) ; extraHeaders . put ( AUTHORIZATION_HEADER , encodeBasicAuth ( username , password ) ) ; return new StaticSecurityHeaderCallCredentials ( extraHeaders ) ;
public class CmsPositionBean { /** * Collects the position information of the given UI object and returns a position info bean . < p > * @ param element the object to read the position data from * @ return the position data */ public static CmsPositionBean generatePositionInfo ( Element element ) { } }
CmsPositionBean result = new CmsPositionBean ( ) ; result . setHeight ( element . getOffsetHeight ( ) ) ; result . setWidth ( element . getOffsetWidth ( ) ) ; result . setTop ( element . getAbsoluteTop ( ) ) ; result . setLeft ( element . getAbsoluteLeft ( ) ) ; return result ;
public class RoleDefinitionsInner { /** * Get role definition by name ( GUID ) . * @ param scope The scope of the role definition . * @ param roleDefinitionId The ID of the role definition . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the RoleDefinitionInner object */ public Observable < RoleDefinitionInner > getAsync ( String scope , String roleDefinitionId ) { } }
return getWithServiceResponseAsync ( scope , roleDefinitionId ) . map ( new Func1 < ServiceResponse < RoleDefinitionInner > , RoleDefinitionInner > ( ) { @ Override public RoleDefinitionInner call ( ServiceResponse < RoleDefinitionInner > response ) { return response . body ( ) ; } } ) ;
public class EquivalentFragmentSets { /** * An { @ link EquivalentFragmentSet } that indicates a variable representing a schema concept with one of the * specified labels . */ public static EquivalentFragmentSet label ( VarProperty varProperty , Variable type , ImmutableSet < Label > labels ) { } }
// Delegate construction to the AutoValue-generated implementation of LabelFragmentSet.
return new AutoValue_LabelFragmentSet ( varProperty , type , labels ) ;
public class Es6RewriteBlockScopedFunctionDeclaration { /** * Rewrite the function declaration from : * < pre > * function f ( ) { } * FUNCTION * NAME x * PARAM _ LIST * BLOCK * < / pre > * to * < pre > * let f = function ( ) { } ; * LET * NAME f * FUNCTION * NAME ( w / empty string ) * PARAM _ LIST * BLOCK * < / pre > * This is similar to { @ link Normalize . NormalizeStatements # rewriteFunctionDeclaration } but * rewrites to " let " instead of " var " . */ private void visitBlockScopedFunctionDeclaration ( NodeTraversal t , Node n , Node parent ) { } }
// NOTE(review): statement order matters here - the function node n is detached from
// its parent and only re-attached under the cloned LET name node at the very end,
// after the LET declaration has been inserted at the front of the parent.
// Prepare a spot for the function . Node oldNameNode = n . getFirstChild ( ) ; Node fnNameNode = oldNameNode . cloneNode ( ) ; Node let = IR . declaration ( fnNameNode , Token . LET ) . srcref ( n ) ; NodeUtil . addFeatureToScript ( t . getCurrentScript ( ) , Feature . LET_DECLARATIONS ) ; // Prepare the function . oldNameNode . setString ( "" ) ; compiler . reportChangeToEnclosingScope ( oldNameNode ) ; // Move the function to the front of the parent . parent . removeChild ( n ) ; parent . addChildToFront ( let ) ; compiler . reportChangeToEnclosingScope ( let ) ; fnNameNode . addChildToFront ( n ) ;
public class ManagedIndex { /** * Required by IndexEntryAccessor interface . */ public boolean isConsistent ( Storable indexEntry , S master ) throws FetchException { } }
// Delegate the consistency check to the wrapped accessor implementation.
return mAccessor . isConsistent ( indexEntry , master ) ;
public class CountingErrorConsumer { /** * Initializes the map of unknown categoricals per column with an unmodifiable and thread - safe implementation of { @ link Map } . * @ param model { @ link GenModel } the unknown categorical per column map is initialized for */ private void initializeUnknownCategoricalsPerColumn ( GenModel model ) { } }
unknownCategoricalsPerColumn = new ConcurrentHashMap < > ( ) ; for ( int i = 0 ; i < model . getNumCols ( ) ; i ++ ) { String [ ] domainValues = model . getDomainValues ( i ) ; if ( domainValues != null ) { unknownCategoricalsPerColumn . put ( model . getNames ( ) [ i ] , new AtomicLong ( ) ) ; } } unknownCategoricalsPerColumn = Collections . unmodifiableMap ( unknownCategoricalsPerColumn ) ;
public class JointParser { /** * 得到支持的依存关系类型集合 * @ return 词性标签集合 */ public Set < String > getSupportedTypes ( ) { } }
Set < String > typeset = new HashSet < String > ( ) ; Set < String > set = factory . DefaultLabelAlphabet ( ) . toSet ( ) ; Iterator < String > itt = set . iterator ( ) ; while ( itt . hasNext ( ) ) { String type = itt . next ( ) ; if ( type . length ( ) == 1 ) continue ; typeset . add ( type . substring ( 1 ) ) ; } return typeset ;
public class Request { /** * Gets the value of the provided attribute * @ param attribute The attribute value or null if not present * @ param < T > the type parameter . * @ return the value for the provided attribute */ @ SuppressWarnings ( "unchecked" ) public < T > T attribute ( String attribute ) { } }
// Unchecked cast: the caller asserts the attribute's runtime type via T.
return ( T ) servletRequest . getAttribute ( attribute ) ;
public class GIS { /** * Train a model using the GIS algorithm . * @ param iterations The number of GIS iterations to perform . * @ param indexer The object which will be used for event compilation . * @ param printMessagesWhileTraining Determines whether training status messages are written to STDOUT . * @ param smoothing Defines whether the created trainer will use smoothing while training the model . * @ return The newly trained model , which can be used immediately or saved * to disk using an opennlp . maxent . io . GISModelWriter object . */ public static GISModel trainModel ( int iterations , DataIndexer indexer , boolean printMessagesWhileTraining , boolean smoothing ) { } }
GISTrainer trainer = new GISTrainer ( printMessagesWhileTraining ) ; trainer . setSmoothing ( smoothing ) ; trainer . setSmoothingObservation ( SMOOTHING_OBSERVATION ) ; return trainer . trainModel ( iterations , indexer ) ;
public class Filters { /** * Shortcut for { @ link # registeredBy ( Class ) } for special scopes ( like classpath scan , bundles lookup etc ) . * @ param specialScope special scope type * @ param < T > expected info container type ( if used within single configuration type ) * @ return items registered in specified context filter */ public static < T extends ItemInfo > Predicate < T > registeredBy ( final ConfigScope specialScope ) { } }
// Resolve the special scope to its marker class and reuse the class-based overload.
return registeredBy ( specialScope . getType ( ) ) ;
public class ColorUtils { /** * Converts HSV color system to RGB * @ param h hue 0-360 * @ param s saturation 0-100 * @ param v value 0-100 * @ return RGB values in LibGDX { @ link Color } class */ public static Color HSVtoRGB ( float h , float s , float v ) { } }
Color c = new Color ( 1 , 1 , 1 , 1 ) ; HSVtoRGB ( h , s , v , c ) ; return c ;
public class RegularPactTask { /** * This method is called at the end of a task , receiving the accumulators of * the task and the chained tasks . It merges them into a single map of * accumulators and sends them to the JobManager . * @ param chainedTasks * Each chained task might have accumulators which will be merged * with the accumulators of the stub . */ protected static void reportAndClearAccumulators ( Environment env , Map < String , Accumulator < ? , ? > > accumulators , ArrayList < ChainedDriver < ? , ? > > chainedTasks ) { } }
// NOTE(review): statement order matters below - accumulators are merged first,
// reported to the JobManager under a lock on the protocol proxy, and cleared only
// after the report; it synchronizes on env.getAccumulatorProtocolProxy(), which
// presumably returns the same proxy instance each call - confirm.
// We can merge here the accumulators from the stub and the chained // tasks . Type conflicts can occur here if counters with same name but // different type were used . for ( ChainedDriver < ? , ? > chainedTask : chainedTasks ) { Map < String , Accumulator < ? , ? > > chainedAccumulators = chainedTask . getStub ( ) . getRuntimeContext ( ) . getAllAccumulators ( ) ; AccumulatorHelper . mergeInto ( accumulators , chainedAccumulators ) ; } // Don ' t report if the UDF didn ' t collect any accumulators if ( accumulators . size ( ) == 0 ) { return ; } // Report accumulators to JobManager synchronized ( env . getAccumulatorProtocolProxy ( ) ) { try { env . getAccumulatorProtocolProxy ( ) . reportAccumulatorResult ( new AccumulatorEvent ( env . getJobID ( ) , accumulators , true ) ) ; } catch ( IOException e ) { throw new RuntimeException ( "Communication with JobManager is broken. Could not send accumulators." , e ) ; } } // We also clear the accumulators , since stub instances might be reused // ( e . g . in iterations ) and we don ' t want to count twice . This may not be // done before sending AccumulatorHelper . resetAndClearAccumulators ( accumulators ) ; for ( ChainedDriver < ? , ? > chainedTask : chainedTasks ) { AccumulatorHelper . resetAndClearAccumulators ( chainedTask . getStub ( ) . getRuntimeContext ( ) . getAllAccumulators ( ) ) ; }
public class Es6RewriteRestAndSpread { /** * Processes a rest parameter */ private void visitRestParam ( NodeTraversal t , Node restParam , Node paramList ) { } }
// NOTE(review): intricate AST rewrite - the rest parameter is replaced in the
// parameter list by a varargs name node, and the function body is rewritten to
// copy `arguments` (from restIndex onward) into a fresh array bound to the
// original parameter name via a `let` in a new block. Statement order is
// significant; do not reorder.
// Prepare a spot for the function . Node functionBody = paramList . getNext ( ) ; int restIndex = paramList . getIndexOfChild ( restParam ) ; Node nameNode = restParam . getOnlyChild ( ) ; String paramName = nameNode . getString ( ) ; // Swap the existing param into the list , moving requisite AST annotations . nameNode . setVarArgs ( true ) ; nameNode . setJSDocInfo ( restParam . getJSDocInfo ( ) ) ; paramList . replaceChild ( restParam , nameNode . detach ( ) ) ; // Make sure rest parameters are typechecked . JSDocInfo inlineInfo = restParam . getJSDocInfo ( ) ; JSDocInfo functionInfo = NodeUtil . getBestJSDocInfo ( paramList . getParent ( ) ) ; final JSTypeExpression paramTypeAnnotation ; if ( inlineInfo != null ) { paramTypeAnnotation = inlineInfo . getType ( ) ; } else if ( functionInfo != null ) { paramTypeAnnotation = functionInfo . getParameterType ( paramName ) ; } else { paramTypeAnnotation = null ; } // TODO ( lharker ) : we should report this error in typechecking , not during transpilation , so // that it also occurs when natively typechecking ES6. if ( paramTypeAnnotation != null && paramTypeAnnotation . getRoot ( ) . getToken ( ) != Token . ELLIPSIS ) { compiler . report ( JSError . make ( restParam , BAD_REST_PARAMETER_ANNOTATION ) ) ; } if ( ! functionBody . hasChildren ( ) ) { // If function has no body , we are done ! t . reportCodeChange ( ) ; return ; } // Don ' t insert these directly , just clone them . Node newArrayName = IR . name ( REST_PARAMS ) . setJSType ( arrayType ) ; Node cursorName = IR . name ( REST_INDEX ) . setJSType ( numberType ) ; Node newBlock = IR . block ( ) . useSourceInfoFrom ( functionBody ) ; Node name = IR . name ( paramName ) ; Node let = IR . let ( name , newArrayName ) . useSourceInfoIfMissingFromForTree ( functionBody ) ; newBlock . addChildToFront ( let ) ; NodeUtil . addFeatureToScript ( t . getCurrentScript ( ) , Feature . LET_DECLARATIONS ) ; for ( Node child : functionBody . children ( ) ) { newBlock . addChildToBack ( child . detach ( ) ) ; } Node newArrayDeclaration = IR . var ( newArrayName . cloneTree ( ) , arrayLitWithJSType ( ) ) ; functionBody . addChildToFront ( newArrayDeclaration . useSourceInfoIfMissingFromForTree ( restParam ) ) ; // TODO ( b / 74074478 ) : Use a general utility method instead of an inlined loop . Node copyLoop = IR . forNode ( IR . var ( cursorName . cloneTree ( ) , IR . number ( restIndex ) . setJSType ( numberType ) ) , IR . lt ( cursorName . cloneTree ( ) , IR . getprop ( IR . name ( "arguments" ) , IR . string ( "length" ) ) . setJSType ( numberType ) ) . setJSType ( boolType ) , IR . inc ( cursorName . cloneTree ( ) , false ) . setJSType ( numberType ) , IR . block ( IR . exprResult ( IR . assign ( IR . getelem ( newArrayName . cloneTree ( ) , IR . sub ( cursorName . cloneTree ( ) , IR . number ( restIndex ) . setJSType ( numberType ) ) . setJSType ( numberType ) ) , IR . getelem ( IR . name ( "arguments" ) , cursorName . cloneTree ( ) ) . setJSType ( numberType ) ) . setJSType ( numberType ) ) ) ) . useSourceInfoIfMissingFromForTree ( restParam ) ; functionBody . addChildAfter ( copyLoop , newArrayDeclaration ) ; functionBody . addChildToBack ( newBlock ) ; compiler . reportChangeToEnclosingScope ( newBlock ) ; // For now , we are running transpilation before type - checking , so we ' ll // need to make sure changes don ' t invalidate the JSDoc annotations . // Therefore we keep the parameter list the same length and only initialize // the values if they are set to undefined . // TODO ( lharker ) : the above comment is out of date since we move transpilation after // typechecking . see if we can improve transpilation and not keep the parameter list the // same length ?
public class FieldSupport { /** * set accessible to ' true ' then set field value * @ param obj the object whose field should be modified * @ param toSet the new value for the field of obj being modified * @ return < code > this < / code > */ public FieldSupport < FieldType , ObjectType > set ( ObjectType obj , Object toSet ) { } }
try { f . setAccessible ( true ) ; f . set ( obj , toSet ) ; return this ; } catch ( Exception e ) { throw $ ( e ) ; }
public class MobileProviderService { /** * Creates a new RemoteWebDriver instance from the first MobileProvider that supports the specified * nodeType . * @ param nodeType - The { @ link MobileNodeType } to use for creating this mobile remote driver . * @ param platform - The { @ link WebDriverPlatform } . * @ param command - The commandExecutor . * @ param url - The URL to use for the remote grid . * @ param caps - The desired capabilities for this new web driver instance . * @ return A new RemoteWebDriver instance . */ public RemoteWebDriver createDriver ( MobileNodeType nodeType , WebDriverPlatform platform , CommandExecutor command , URL url , Capabilities caps ) { } }
if ( mobileProviders . containsKey ( nodeType ) ) { logger . log ( Level . FINE , "Found mobile driver provider that supports " + nodeType ) ; return mobileProviders . get ( nodeType ) . createDriver ( platform , command , url , caps ) ; } logger . severe ( "Did not found a mobile driver provider that supports " + nodeType ) ; return null ;
public class BasicModMixer { /** * Will mix # count 24bit signed samples in stereo into the two buffer . * The buffers will contain 24Bit signed samples . * @ param leftBuffer * @ param rightBuffer * @ param count * @ return # of samples mixed , - 1 if mixing finished */ public int mixIntoBuffer ( final int [ ] leftBuffer , final int [ ] rightBuffer , final int count ) { } }
// NOTE(review): mixes one tick of samples per channel, then cross-fades the first
// VOL_RAMP_LEN frames of the new tick with the ramp data saved from the previous
// tick to avoid clicks. samplePerTicks may change inside doTickEvents(), which is
// why endIndex is re-advanced after each tick.
// try if ( modFinished ) return - 1 ; int bufferIdx = 0 ; // Index into the buffer int endIndex = samplePerTicks ; // where ! will ! we be after next mixing ( will this still fit ? ! ) final int maxEndIndex = count - Helpers . VOL_RAMP_LEN ; if ( maxEndIndex < samplePerTicks ) throw new RuntimeException ( "The mixing buffer is too small. Minimum are " + samplePerTicks + " sample frames" ) ; while ( endIndex < maxEndIndex && ! modFinished ) { for ( int c = 0 ; c < maxChannels ; c ++ ) { // if ( c ! = 1 ) continue ; / / TODO : COMMENT THIS OUT AGAIN IF FINISHED WITH DEBUGGING ChannelMemory actMemo = channelMemory [ c ] ; // Mix this channel ? if ( ! actMemo . muted && isChannelActive ( actMemo ) ) { // fill in those samples mixChannelIntoBuffers ( leftBuffer , rightBuffer , bufferIdx , endIndex , actMemo ) ; // and get the ramp data for interweaving , if there is something left if ( ! actMemo . instrumentFinished ) fillRampDataIntoBuffers ( nvRampL , nvRampR , actMemo ) ; } } // Now Interweave with last ticks ramp buffer data for ( int n = 0 ; n < Helpers . VOL_RAMP_LEN ; n ++ ) { final int difFade = Helpers . VOL_RAMP_LEN - n ; leftBuffer [ bufferIdx + n ] = ( ( leftBuffer [ bufferIdx + n ] * n ) + ( vRampL [ n ] * difFade ) ) >> Helpers . VOL_RAMP_FRAC ; rightBuffer [ bufferIdx + n ] = ( ( rightBuffer [ bufferIdx + n ] * n ) + ( vRampR [ n ] * difFade ) ) >> Helpers . VOL_RAMP_FRAC ; // and copy in one step . . . vRampL [ n ] = nvRampL [ n ] ; vRampR [ n ] = nvRampR [ n ] ; nvRampL [ n ] = nvRampR [ n ] = 0 ; } bufferIdx += samplePerTicks ; modFinished = doTickEvents ( ) ; endIndex += samplePerTicks ; // tickevents can change samplePerTicks } return bufferIdx ; // catch ( Throwable ex ) // / / This is only needed for debugging porposes during playback , so // / / we know , where the error happend . . . // throw new RuntimeException ( this . getClass ( ) . getName ( ) + " " + currentPatternIndex + " [ " + currentRow + " ] " , ex ) ;
public class CmsContextInfo { /** * Sets the username . < p > * @ param userName the username to set * @ see CmsRequestContext # getCurrentUser ( ) */ public void setUserName ( String userName ) { } }
// Fails if this context info is frozen; also derives and sets the user's
// organizational unit from the fully qualified user name.
checkFrozen ( ) ; m_userName = userName ; setOuFqn ( CmsOrganizationalUnit . getParentFqn ( userName ) ) ;
public class VisOdomMonoPlaneInfinity { /** * Splits the set of active tracks into on plane and infinity sets . For each set also perform specific sanity * checks to make sure basic constraints are still being meet . If not then the track will not be considered for * motion estimation . */ private void sortTracksForEstimation ( ) { } }
// NOTE(review): the sign of pointing.y (the pointing vector expressed in the plane
// reference frame) decides plane intersection - positive for on-plane tracks,
// negative for far/infinity tracks; presumably y is the plane normal axis - confirm.
// reset data structures planeSamples . reset ( ) ; farAngles . reset ( ) ; tracksOnPlane . clear ( ) ; tracksFar . clear ( ) ; // list of active tracks List < PointTrack > active = tracker . getActiveTracks ( null ) ; for ( PointTrack t : active ) { VoTrack p = t . getCookie ( ) ; // compute normalized image coordinate pixelToNorm . compute ( t . x , t . y , n ) ; // rotate pointing vector into plane reference frame pointing . set ( n . x , n . y , 1 ) ; GeometryMath_F64 . mult ( cameraToPlane . getR ( ) , pointing , pointing ) ; pointing . normalize ( ) ; if ( p . onPlane ) { // see if it still intersects the plane if ( pointing . y > 0 ) { // create data structure for robust motion estimation PlanePtPixel ppp = planeSamples . grow ( ) ; ppp . normalizedCurr . set ( n ) ; ppp . planeKey . set ( p . ground ) ; tracksOnPlane . add ( t ) ; } } else { // if the point is not on the plane visually and ( optionally ) if it passes a strict y - axis rotation // test , consider using the point for estimating rotation . boolean allGood = pointing . y < 0 ; if ( strictFar ) { allGood = isRotationFromAxisY ( t , pointing ) ; } // is it still above the ground plane and only has motion consistent with rotation on ground plane axis if ( allGood ) { computeAngleOfRotation ( t , pointing ) ; tracksFar . add ( t ) ; } } }
public class ElementImpl { /** * This implementation walks the entire document looking for an element * with the given ID attribute . We should consider adding an index to speed * navigation of large documents . */ Element getElementById ( String name ) { } }
for ( Attr attr : attributes ) { if ( attr . isId ( ) && name . equals ( attr . getValue ( ) ) ) { return this ; } } /* * TODO : Remove this behavior . * The spec explicitly says that this is a bad idea . From * Document . getElementById ( ) : " Attributes with the name " ID " * or " id " are not of type ID unless so defined . */ if ( name . equals ( getAttribute ( "id" ) ) ) { return this ; } for ( NodeImpl node : children ) { if ( node . getNodeType ( ) == Node . ELEMENT_NODE ) { Element element = ( ( ElementImpl ) node ) . getElementById ( name ) ; if ( element != null ) { return element ; } } } return null ;
public class JdepsFilter { /** * Tests if the given class matches the pattern given in the - include option * @ param cn fully - qualified name */ public boolean matches ( String cn ) { } }
if ( includePattern == null ) return true ; if ( includePattern != null ) return includePattern . matcher ( cn ) . matches ( ) ; return false ;
public class AmazonElasticLoadBalancingClient { /** * Deletes the specified policy from the specified load balancer . This policy must not be enabled for any listeners . * @ param deleteLoadBalancerPolicyRequest * Contains the parameters for DeleteLoadBalancerPolicy . * @ return Result of the DeleteLoadBalancerPolicy operation returned by the service . * @ throws LoadBalancerNotFoundException * The specified load balancer does not exist . * @ throws InvalidConfigurationRequestException * The requested configuration change is not valid . * @ sample AmazonElasticLoadBalancing . DeleteLoadBalancerPolicy * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticloadbalancing - 2012-06-01 / DeleteLoadBalancerPolicy " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DeleteLoadBalancerPolicyResult deleteLoadBalancerPolicy ( DeleteLoadBalancerPolicyRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteLoadBalancerPolicy ( request ) ;
public class NodeManager { /** * Get a runnable node . * @ param requestedNode The request information . * @ param maxLevel The maximum locality level that we can go to . * @ param type The type of resource . * @ param excluded The excluded nodes . * @ return The runnable node that can be used . */ public ClusterNode getRunnableNode ( RequestedNode requestedNode , LocalityLevel maxLevel , ResourceType type , Set < String > excluded ) { } }
ClusterNode node = null ; RunnableIndices r = typeToIndices . get ( type ) ; // find host local node = r . getRunnableNodeForHost ( requestedNode ) ; if ( maxLevel == LocalityLevel . NODE || node != null ) { return node ; } node = r . getRunnableNodeForRack ( requestedNode , excluded ) ; if ( maxLevel == LocalityLevel . RACK || node != null ) { return node ; } // find any node node = r . getRunnableNodeForAny ( excluded ) ; return node ;
public class LexicalUUIDType { /** * singleton */ public int compare ( ByteBuffer o1 , ByteBuffer o2 ) { } }
if ( ! o1 . hasRemaining ( ) || ! o2 . hasRemaining ( ) ) return o1 . hasRemaining ( ) ? 1 : o2 . hasRemaining ( ) ? - 1 : 0 ; return UUIDGen . getUUID ( o1 ) . compareTo ( UUIDGen . getUUID ( o2 ) ) ;
public class DefaultQueryParser { /** * Parses the previously split query string into a query request and fills the parameters of the object * with the data . */ private QueryRequest createQueryRequest ( String [ ] elements ) { } }
// Each element is expected to look like key:"value" (split on the first ':' only);
// backslashes are escaped in both parts, and the surrounding quotes are stripped
// from the value via substring(1, length-1).
// NOTE(review): assumes every element contains a ':' and the value part has at
// least two characters - malformed input would throw here; confirm the upstream
// splitter guarantees this.
QueryRequest request = QueryRequest . create ( ) ; for ( String element : elements ) { String [ ] parts = StringUtils . split ( element , ":" , 2 ) ; parts [ 0 ] = parts [ 0 ] . replace ( "\\" , "\\\\" ) ; parts [ 1 ] = parts [ 1 ] . replace ( "\\" , "\\\\" ) ; request . addParameter ( parts [ 0 ] , parts [ 1 ] . substring ( 1 , parts [ 1 ] . length ( ) - 1 ) ) ; } return request ;
public class SimpleEVD { /** * Returns an eigenvalue as a complex number . For symmetric matrices the returned eigenvalue will always be a real * number , which means the imaginary component will be equal to zero . * NOTE : The order of the eigenvalues is dependent upon the decomposition algorithm used . This means that they may * or may not be ordered by magnitude . For example the QR algorithm will returns results that are partially * ordered by magnitude , but this behavior should not be relied upon . * @ param index Index of the eigenvalue eigenvector pair . * @ return An eigenvalue . */ public Complex_F64 getEigenvalue ( int index ) { } }
if ( is64 ) return ( ( EigenDecomposition_F64 ) eig ) . getEigenvalue ( index ) ; else { Complex_F64 c = ( ( EigenDecomposition_F64 ) eig ) . getEigenvalue ( index ) ; return new Complex_F64 ( c . real , c . imaginary ) ; }
public class RouteSelector { /** * Returns the next proxy to try . May be PROXY . NO _ PROXY but never null . */ private Proxy nextProxy ( ) throws IOException { } }
if ( ! hasNextProxy ( ) ) { throw new SocketException ( "No route to " + address . url ( ) . host ( ) + "; exhausted proxy configurations: " + proxies ) ; } Proxy result = proxies . get ( nextProxyIndex ++ ) ; resetNextInetSocketAddress ( result ) ; return result ;
public class Utils { /** * Closes and flushes the specified { @ link Closeable } items . * @ param closeables An { @ link Iterable } of { @ link Closeable } items . */ public static void closeQuietly ( Iterable < Closeable > closeables ) { } }
for ( Closeable c : closeables ) { try { // Check if we also need to flush if ( c instanceof Flushable ) { ( ( Flushable ) c ) . flush ( ) ; } if ( c != null ) { c . close ( ) ; } } catch ( IOException e ) { LOGGER . debug ( "Error closing:" + c ) ; } }
public class CouchDbClient { /** * Shuts down and releases resources used by this couchDbClient instance . * Note : Apache ' s httpclient was replaced by HttpUrlConnection . * Connection manager is no longer used . */ public void shutdown ( ) { } }
// Delete the cookie _ session if there is one Response response = executeToResponse ( Http . DELETE ( new URIBase ( clientUri ) . path ( "_session" ) . build ( ) ) ) ; if ( ! response . isOk ( ) ) { log . warning ( "Error deleting session on client shutdown." ) ; } // The execute method handles non - 2xx response codes by throwing a CouchDbException . factory . shutdown ( ) ;
public class MKeyArea { /** * Insert this record at the current location . * @ param table The basetable . * @ param keyArea The key area . * @ param bufferNew The buffer to add . * @ param iRelPosition relative position to add the record . * @ exception DBException File exception . */ public void insertCurrent ( FieldTable vectorTable , KeyAreaInfo keyArea , BaseBuffer bufferNew , int iRelPosition ) throws DBException { } }
m_iIndex += iRelPosition ; m_VectorObjects . insertElementAt ( bufferNew , m_iIndex ) ; m_iIndex = - 1 ; // The index can ' t be used in caches anymore .
public class RegexValidator { /** * { @ inheritDoc } */ public void restoreState ( FacesContext context , Object state ) { } }
if ( state != null ) { // Since pattern is required , if state is null // nothing has changed this . pattern = ( String ) state ; }
public class BDAImpl {
    /**
     * Reflective feature accessor generated by EMF: maps a structural-feature ID
     * to the corresponding getter, delegating unknown IDs to the superclass.
     * Do not hand-edit — regeneration will overwrite changes.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.BDA__FLAGS:
                return getFlags();
            case AfplibPackage.BDA__XOFFSET:
                return getXoffset();
            case AfplibPackage.BDA__YOFFSET:
                return getYoffset();
            case AfplibPackage.BDA__DATA:
                return getData();
        }
        // Unknown feature IDs are resolved by the generated superclass.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class APIAccessService { /** * Process the { @ link JSONObject } request * @ param urlString * @ param request * @ param requestType * @ param requestHedaers * @ return { @ link JSONObject } * @ throws CertificateException * @ throws IOException * @ throws JSONException */ public JSONObject processAPIRequest ( String urlString , JSONObject request , String requestType , String [ ] requestParameters , String ... requestHedaers ) throws CertificateException , IOException , JSONException { } }
resetRequestScenarioVariables ( ) ; String response = null ; GenericUser authenticationUser = ( GenericUser ) getScenarioGlobals ( ) . getAttribute ( API_AUTHENTICATION_USER ) ; if ( BROWSER_API_ACCESS_MODE . equals ( getAPIAccessMode ( ) ) ) { response = processBrowserAPIRequest ( urlString , request , requestType , requestParameters ) ; } else { response = processHTTPAPIRequest ( urlString , request , authenticationUser , requestType , requestParameters , requestHedaers ) ; } JSONObject responseJSON = null ; try { if ( ! StringUtils . isBlank ( response ) ) { responseJSON = new JSONObject ( response ) ; log . debug ( "API JSON response: " + responseJSON . toString ( 4 ) ) ; } } catch ( JSONException ex ) { fail ( "Contructing the JSON respose failed with: " + ex . getLocalizedMessage ( ) + "\n\nResponse: " + response ) ; } getScenarioGlobals ( ) . setAttribute ( APIAccessService . LAST_SCENARIO_JSON_RESPONSE_KEY , responseJSON ) ; return responseJSON ;
public class Classes { /** * Retrieve binary resource as array of bytes . Uses { @ link # getResourceAsStream ( String ) } to read binary resource and * return bytes array . * @ param name resource name . * @ return binary resource content as array of bytes . * @ throws NoSuchBeingException if resource not found . * @ throws IOException if resource reading fails . */ public static byte [ ] getResourceAsBytes ( String name ) throws IOException { } }
InputStream is = getResourceAsStream ( name ) ; ByteArrayOutputStream os = new ByteArrayOutputStream ( ) ; try { byte [ ] buffer = new byte [ 4096 ] ; int length = 0 ; while ( ( length = is . read ( buffer ) ) != - 1 ) { os . write ( buffer , 0 , length ) ; } } finally { Files . close ( is ) ; Files . close ( os ) ; } return os . toByteArray ( ) ;
public class DateControl { /** * Unbinds the given control from this control . Unbinding is done for all * properties and observable lists that have previously been bound by the * { @ link # bind ( DateControl , boolean ) } method . * @ param otherControl the control to unbind */ public final void unbind ( DateControl otherControl ) { } }
requireNonNull ( otherControl ) ; boundDateControls . remove ( otherControl ) ; // unbind maps Bindings . unbindContentBidirectional ( otherControl . getCalendarVisibilityMap ( ) , getCalendarVisibilityMap ( ) ) ; // unbind lists Bindings . unbindContentBidirectional ( otherControl . getCalendarSources ( ) , getCalendarSources ( ) ) ; Bindings . unbindContentBidirectional ( otherControl . getSelections ( ) , getSelections ( ) ) ; // unbind properties Bindings . unbindBidirectional ( otherControl . suspendUpdatesProperty ( ) , suspendUpdatesProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . entryFactoryProperty ( ) , entryFactoryProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . defaultCalendarProviderProperty ( ) , defaultCalendarProviderProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . virtualGridProperty ( ) , virtualGridProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . draggedEntryProperty ( ) , draggedEntryProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . requestedTimeProperty ( ) , requestedTimeProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . selectionModeProperty ( ) , selectionModeProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . selectionModeProperty ( ) , selectionModeProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . weekFieldsProperty ( ) , weekFieldsProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . dateProperty ( ) , dateProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . todayProperty ( ) , todayProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . zoneIdProperty ( ) , zoneIdProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . layoutProperty ( ) , layoutProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . startTimeProperty ( ) , startTimeProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . endTimeProperty ( ) , endTimeProperty ( ) ) ; Bindings . 
unbindBidirectional ( otherControl . timeProperty ( ) , timeProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . usagePolicyProperty ( ) , usagePolicyProperty ( ) ) ; // unbind callbacks Bindings . unbindBidirectional ( otherControl . entryDetailsCallbackProperty ( ) , entryDetailsCallbackProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . dateDetailsCallbackProperty ( ) , dateDetailsCallbackProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . contextMenuCallbackProperty ( ) , contextMenuCallbackProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . entryContextMenuCallbackProperty ( ) , entryContextMenuCallbackProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . calendarSourceFactoryProperty ( ) , calendarSourceFactoryProperty ( ) ) ; Bindings . unbindBidirectional ( otherControl . entryEditPolicyProperty ( ) , entryEditPolicyProperty ( ) ) ;
public class MTRandom {
    /**
     * Packs a byte array into an integer array, little-endian within each word,
     * so that seed bytes can be reused for repeated re-seeding without being
     * re-packed each time. If the length of the byte array is not a multiple of
     * 4 it is implicitly zero-padded. For example:
     * <pre>byte[] { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06 }</pre>
     * becomes
     * <pre>int[] { 0x04030201, 0x00000605 }</pre>
     * An empty byte array produces an empty integer array (note that setSeed()
     * rejects an empty integer array).
     *
     * @param buf The non-null byte array to be packed.
     * @return A non-null integer array of the packed bytes.
     * @throws NullPointerException if the given byte array is null.
     */
    public static int[] pack(byte[] buf) {
        int byteCount = buf.length;
        int wordCount = (byteCount + 3) >>> 2; // ceil(byteCount / 4)
        int[] packed = new int[wordCount];
        for (int w = 0; w < wordCount; w++) {
            int base = w << 2;
            int limit = Math.min(base + 4, byteCount);
            // Assemble the word from the highest byte downwards so the byte at
            // 'base' ends up in the least-significant position (little-endian).
            int word = 0;
            for (int b = limit - 1; b >= base; b--) {
                word = (word << 8) | (buf[b] & 0xff);
            }
            packed[w] = word;
        }
        return packed;
    }
}
public class Process { /** * Gets all of the Content Specification Unique Topic ID ' s that are used in the process . * @ return A List of Unique Topic ID ' s . */ protected List < String > getTopicIds ( ) { } }
LinkedList < String > topicIds = new LinkedList < String > ( ) ; for ( final Entry < String , SpecTopic > specTopicEntry : topics . entrySet ( ) ) { topicIds . add ( specTopicEntry . getKey ( ) ) ; } return topicIds ;
public class HadoopUtils {
    /**
     * Returns a new Hadoop Configuration object using the path to the hadoop conf configured
     * in the main configuration (flink-conf.yaml). Both core-site.xml and hdfs-site.xml are
     * loaded (when found) to determine the default fs path and the hdfs configuration.
     * This method is public because it is being used in the HadoopDataSource.
     *
     * Resolution order: explicit hdfs-default/hdfs-site paths from the Flink configuration
     * first, then candidate conf directories from the Flink config and the HADOOP_CONF_DIR /
     * HADOOP_HOME environment variables. Later addResource() calls can override earlier ones.
     *
     * @param flinkConfiguration Flink configuration object
     * @return A Hadoop configuration instance
     */
    public static Configuration getHadoopConfiguration(org.apache.flink.configuration.Configuration flinkConfiguration) {
        Configuration retConf = new Configuration();

        // We need to load both core-site.xml and hdfs-site.xml to determine the default fs path and
        // the hdfs configuration
        // Try to load HDFS configuration from Hadoop's own configuration files
        // 1. approach: Flink configuration (explicit file paths)
        final String hdfsDefaultPath = flinkConfiguration.getString(ConfigConstants.HDFS_DEFAULT_CONFIG, null);
        if (hdfsDefaultPath != null) {
            retConf.addResource(new org.apache.hadoop.fs.Path(hdfsDefaultPath));
        } else {
            LOG.debug("Cannot find hdfs-default configuration file");
        }
        final String hdfsSitePath = flinkConfiguration.getString(ConfigConstants.HDFS_SITE_CONFIG, null);
        if (hdfsSitePath != null) {
            retConf.addResource(new org.apache.hadoop.fs.Path(hdfsSitePath));
        } else {
            LOG.debug("Cannot find hdfs-site configuration file");
        }

        // 2. Approach: environment variables pointing at a Hadoop conf directory.
        // Candidates are probed in order; every existing directory contributes its
        // core-site.xml / hdfs-site.xml (null slots are simply skipped).
        String[] possibleHadoopConfPaths = new String[4];
        possibleHadoopConfPaths[0] = flinkConfiguration.getString(ConfigConstants.PATH_HADOOP_CONFIG, null);
        possibleHadoopConfPaths[1] = System.getenv("HADOOP_CONF_DIR");
        if (System.getenv("HADOOP_HOME") != null) {
            possibleHadoopConfPaths[2] = System.getenv("HADOOP_HOME") + "/conf";
            possibleHadoopConfPaths[3] = System.getenv("HADOOP_HOME") + "/etc/hadoop"; // hadoop 2.2
        }
        for (String possibleHadoopConfPath : possibleHadoopConfPaths) {
            if (possibleHadoopConfPath != null) {
                if (new File(possibleHadoopConfPath).exists()) {
                    if (new File(possibleHadoopConfPath + "/core-site.xml").exists()) {
                        retConf.addResource(new org.apache.hadoop.fs.Path(possibleHadoopConfPath + "/core-site.xml"));
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Adding " + possibleHadoopConfPath + "/core-site.xml to hadoop configuration");
                        }
                    }
                    if (new File(possibleHadoopConfPath + "/hdfs-site.xml").exists()) {
                        retConf.addResource(new org.apache.hadoop.fs.Path(possibleHadoopConfPath + "/hdfs-site.xml"));
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Adding " + possibleHadoopConfPath + "/hdfs-site.xml to hadoop configuration");
                        }
                    }
                }
            }
        }
        return retConf;
    }
}
public class GetData { /** * Evaluate the expression at the < code > value < / code > attribute and store the result in the * { @ link javax . servlet . jsp . PageContext } under the attribute key specified in { @ link # setResultId ( String ) } . * If an existing key in the PageContext ' s attribute map exists , a warning will be written to the log file . * If errors occur during expression evaluation , they will be reported in the JSP page . If the value * returned by the expression is null , an attribute named < code > resultId < / code > will be removed from * the PageContext ' s attribute map . * @ return { @ link # EVAL _ PAGE } */ public int doEndTag ( ) throws JspException { } }
if ( _value != null ) { if ( LOGGER . isInfoEnabled ( ) && pageContext . getAttribute ( _resultId ) != null ) LOGGER . info ( "Overwriting a value in PageContext attribute map with key \"" + _resultId + "\" and object of type \"" + _value . getClass ( ) . getName ( ) ) ; pageContext . setAttribute ( _resultId , _value ) ; } else { if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( "Removing a value from the PageContext attribute map with key \"" + _resultId + "\". The object returned by the expression is null." ) ; pageContext . removeAttribute ( _resultId ) ; } localRelease ( ) ; return EVAL_PAGE ;
public class CmsVfsMemoryObjectCache { /** * Returns a cache key for the given system id ( filename ) based on the status * of the given project flag . < p > * @ param systemId the system id ( filename ) to get the cache key for * @ param online indicates if this key is generated for the online project * @ return the cache key for the system id */ private String getCacheKey ( String systemId , boolean online ) { } }
if ( online ) { return "online_(" + m_id + ")_" + systemId ; } return "offline_(" + m_id + ")_" + systemId ;
public class LogRepositorySpaceAlert { /** * determine if the repository location is fine as is , or if it may be shortened . Shortening is hpel specific in * that we look for logdata or tracedata as the suffix and , if it is there AND the parent directory is of about * the same size , then we keep the parent directory . This allows us to combine needs of different repositories * that may be in the same fileSystem . ie : If logdata , tracedata , and textLog all need just 20 meg each and they * are on the same fs which has 50 meg left , the only way we know that this is a problem is if we consider the * combined need . By modifying the repositoryLoc , this should work for the vast majority of cases . * @ param repositoryLocation incoming repository location * @ return same or modified repository location depending on file system information */ private File calculateFsRoot ( File repositoryLocation ) { } }
// Find first existing directory among ancestors while ( ! AccessHelper . isDirectory ( repositoryLocation ) ) { repositoryLocation = repositoryLocation . getParentFile ( ) ; } if ( repositoryLocation . getName ( ) . equals ( LogRepositoryBaseImpl . DEFAULT_LOCATION ) || repositoryLocation . getName ( ) . equals ( LogRepositoryBaseImpl . TRACE_LOCATION ) ) { File parentFile = repositoryLocation . getParentFile ( ) ; if ( AccessHelper . getTotalSpace ( repositoryLocation ) == AccessHelper . getTotalSpace ( parentFile ) ) { long thisFsSpace = AccessHelper . getFreeSpace ( repositoryLocation ) ; long parentFsSpace = AccessHelper . getFreeSpace ( parentFile ) ; // If it ends in tracedata or logdata and parent directory and this directory are highly similar ( within 5 % ) // in free space . . . assume we can cut off the tracedata or logdata because on same fs . This means that // in the case where the logs are the same fs . . . we will consider combined needs . if ( thisFsSpace > 0 && parentFsSpace > 0 && thisFsSpace > ( long ) ( parentFsSpace * .95 ) && thisFsSpace < ( long ) ( parentFsSpace * 1.05 ) ) { repositoryLocation = parentFile ; } } } return repositoryLocation ;
public class SARLStandaloneSetup { /** * Create the injector based on the given set of modules and prepare the EMF infrastructure . * @ param modules the injection modules that are overriding the standard SARL module . * @ return the injector . * @ since 0.8 * @ see SARLRuntimeModule */ public Injector createInjectorAndDoEMFRegistration ( Module ... modules ) { } }
doPreSetup ( ) ; final Injector injector = createInjector ( modules ) ; register ( injector ) ; return injector ;
public class InternalNode { /** * remove child */ protected void moveChildsLeft ( final int srcPos ) { } }
// if ( log . isDebugEnabled ( ) ) log . debug ( " moveKeysLeft ( " + srcPos + " ) allocated = " + allocated + " : " + // keys . length + " : " + ( allocated - srcPos - 1 ) + " : " + ( keys . length - srcPos - 1 ) ) ; System . arraycopy ( childs , srcPos + 1 , childs , srcPos , allocated - srcPos ) ;
public class EventSubscriptionsInner {
    /**
     * List all event subscriptions for a specific topic.
     * List all event subscriptions that have been created for a specific topic.
     * Blocks the calling thread until the async call completes and unwraps the
     * service-response body.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription.
     * @param providerNamespace Namespace of the provider of the topic
     * @param resourceTypeName Name of the resource type
     * @param resourceName Name of the resource
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;EventSubscriptionInner&gt; object if successful.
     */
    public List<EventSubscriptionInner> listByResource(String resourceGroupName, String providerNamespace, String resourceTypeName, String resourceName) {
        // Synchronous facade over the reactive variant: block, take the single
        // emitted response, return its payload.
        return listByResourceWithServiceResponseAsync(resourceGroupName, providerNamespace, resourceTypeName, resourceName).toBlocking().single().body();
    }
}
public class AcpOrd {
    /**
     * <p>It checks additionally and books S.E. items.</p>
     * Phases (order matters — availability is re-checked right before booking):
     * 1) merge duplicate good lines across orders and re-check availability;
     * 2) re-check availability of bookable (dated) services;
     * 3) book goods: decrement per-place quantities and the ITEMINLIST totals;
     * 4) book undated services the same way;
     * 5) record busy time ranges (SeSerBus) for dated services, reusing freed rows.
     *
     * @param pRqVs additional request scoped parameters
     * @param pCoOr consolidate order
     * @throws Exception - an exception if incomplete
     */
    public final void adChekBookSe(final Map<String, Object> pRqVs, final CuOrSe pCoOr) throws Exception {
        // additional checking:
        String tbn;
        // check availability and booking for same good in different orders:
        // gljs = joined (surviving) lines, glrs = redundant lines to remove.
        List<CuOrSeGdLn> gljs = null;
        List<CuOrSeGdLn> glrs = null;
        for (CuOrSeGdLn gl : pCoOr.getGoods()) {
            // join lines with same item:
            for (CuOrSeGdLn gl0 : pCoOr.getGoods()) {
                // Different line IDs but the same underlying good -> merge quantities.
                if (!gl.getItsId().equals(gl0.getItsId()) && gl.getGood().getItsId().equals(gl0.getGood().getItsId())) {
                    if (gljs == null) {
                        gljs = new ArrayList<CuOrSeGdLn>();
                        glrs = new ArrayList<CuOrSeGdLn>();
                    }
                    glrs.add(gl0);
                    if (!gljs.contains(gl)) {
                        gljs.add(gl);
                    }
                    gl.setQuant(gl.getQuant().add(gl0.getQuant()));
                }
            }
        }
        if (gljs != null) {
            // Drop the merged-away duplicates, then verify the combined quantity
            // is still available at some place.
            for (CuOrSeGdLn glr : glrs) {
                pCoOr.getGoods().remove(glr);
            }
            tbn = SeGoodsPlace.class.getSimpleName();
            pRqVs.put(tbn + "itemdeepLevel", 1); // only ID
            pRqVs.put(tbn + "pickUpPlacedeepLevel", 1);
            for (CuOrSeGdLn gl : gljs) {
                List<SeGoodsPlace> gps = getSrvOrm().retrieveListWithConditions(pRqVs, SeGoodsPlace.class, "where ITEM=" + gl.getGood().getItsId() + " and ITSQUANTITY>=" + gl.getQuant());
                if (gps.size() == 0) {
                    throw new Exception("AC. S.E.Good is not available #" + gl.getGood().getItsId());
                }
            }
            pRqVs.remove(tbn + "itemdeepLevel");
            pRqVs.remove(tbn + "pickUpPlacedeepLevel");
        }
        // bookable services final - checkout:
        String cond;
        tbn = SeServicePlace.class.getSimpleName();
        pRqVs.put(tbn + "itemdeepLevel", 1); // only ID
        pRqVs.put(tbn + "pickUpPlacedeepLevel", 1);
        for (CuOrSeSrLn sl : pCoOr.getServs()) {
            // Only dated (bookable) service lines are checked here; the query
            // excludes places whose time range is already busy (SESERBUS).
            if (sl.getDt1() != null) {
                // NOTE(review): the final "where ITEM=" clause concatenates
                // sl.getService() (object toString) whereas the SERV= clause above
                // uses sl.getService().getItsId() — likely getItsId() was intended;
                // confirm against the entity's toString() before changing.
                cond = "left join (select distinct SERV from SESERBUS where FRE=0 and SERV=" + sl.getService().getItsId() + " and FRTM>=" + sl.getDt1().getTime() + " and TITM<" + sl.getDt2().getTime() + ") as SERBUS on SERBUS.SERV=SESERVICEPLACE.ITEM where ITEM=" + sl.getService() + " and ITSQUANTITY>0 and SERBUS.SERV is null";
                List<SeServicePlace> sps = getSrvOrm().retrieveListWithConditions(pRqVs, SeServicePlace.class, cond);
                if (sps.size() == 0) {
                    throw new Exception("AC. BK.Service is not available #" + sl.getService().getItsId());
                }
            }
        }
        pRqVs.remove(tbn + "itemdeepLevel");
        pRqVs.remove(tbn + "pickUpPlacedeepLevel");
        // booking:
        // changing availability (booking):
        // fastLoc uses direct SQL formula updates; otherwise entities are loaded
        // (restricted to the listed fields) and updated through the ORM.
        ColumnsValues cvsIil = null;
        String[] fnmIil = null;
        if (this.fastLoc) {
            cvsIil = new ColumnsValues();
            cvsIil.getFormula().add("availableQuantity");
        } else {
            fnmIil = new String[] {"itsId", "itsVersion", "availableQuantity"};
        }
        tbn = SeGoodsPlace.class.getSimpleName();
        pRqVs.put(tbn + "itemdeepLevel", 1); // only ID
        pRqVs.put(tbn + "pickUpPlacedeepLevel", 1);
        for (CuOrSeGdLn gl : pCoOr.getGoods()) {
            // ISALWAYS=0: places with unlimited availability need no booking.
            List<SeGoodsPlace> gps = getSrvOrm().retrieveListWithConditions(pRqVs, SeGoodsPlace.class, "where ISALWAYS=0 and ITEM=" + gl.getGood().getItsId());
            if (gps.size() != 0) {
                BigDecimal avQu = BigDecimal.ZERO;
                for (SeGoodsPlace gp : gps) {
                    avQu = avQu.add(gp.getItsQuantity());
                }
                if (avQu.compareTo(gl.getQuant()) == -1) {
                    // previous test should not be passed!!!
                    throw new Exception("AC. S.E.Good is not available #" + gl.getGood().getItsId());
                }
                // Spread the ordered quantity over the places until satisfied.
                BigDecimal rst = gl.getQuant();
                for (SeGoodsPlace gp : gps) {
                    if (rst.compareTo(BigDecimal.ZERO) == 0) {
                        break;
                    }
                    if (gp.getItsQuantity().compareTo(gl.getQuant()) == -1) {
                        rst = rst.subtract(gp.getItsQuantity());
                        gp.setItsQuantity(BigDecimal.ZERO);
                    } else {
                        gp.setItsQuantity(gp.getItsQuantity().subtract(rst));
                        rst = BigDecimal.ZERO;
                    }
                    // TODO PERFORM fastupd
                    getSrvOrm().updateEntity(pRqVs, gp);
                }
                // ITSTYPE=2 -> goods rows in the denormalized ITEMINLIST table.
                String wheTyId = "ITSTYPE=2 and ITEMID=" + gl.getGood().getItsId();
                if (this.fastLoc) {
                    // it must be RDBMS constraint: "availableQuantity >= 0"!
                    cvsIil.put("itsVersion", new Date().getTime());
                    cvsIil.put("availableQuantity", "AVAILABLEQUANTITY-" + gl.getQuant());
                    this.srvDb.executeUpdate("ITEMINLIST", cvsIil, wheTyId);
                } else {
                    pRqVs.put("fieldsNames", fnmIil);
                    List<ItemInList> iils = this.srvOrm.retrieveListWithConditions(pRqVs, ItemInList.class, "where " + wheTyId);
                    if (iils.size() == 1) {
                        BigDecimal aq = iils.get(0).getAvailableQuantity().subtract(gl.getQuant());
                        if (aq.compareTo(BigDecimal.ZERO) == -1) {
                            pRqVs.remove("fieldsNames");
                            throw new Exception("ItemInList NA avQuan SeGood: id/itId/avQua/quan: " + iils.get(0).getItsId() + "/" + gl.getGood().getItsId() + "/" + iils.get(0).getAvailableQuantity() + "/" + gl.getQuant());
                        } else {
                            iils.get(0).setAvailableQuantity(aq);
                            getSrvOrm().updateEntity(pRqVs, iils.get(0));
                        }
                    } else {
                        pRqVs.remove("fieldsNames");
                        throw new Exception("ItemInList WC for SeGood: itId/count: " + gl.getGood().getItsId() + "/" + iils.size());
                    }
                    pRqVs.remove("fieldsNames");
                }
            }
        }
        pRqVs.remove(tbn + "itemdeepLevel");
        pRqVs.remove(tbn + "pickUpPlacedeepLevel");
        tbn = SeServicePlace.class.getSimpleName();
        pRqVs.put(tbn + "itemdeepLevel", 1); // only ID
        pRqVs.put(tbn + "pickUpPlacedeepLevel", 1);
        // tibs: at least one dated service line exists -> busy-time booking below.
        boolean tibs = false;
        for (CuOrSeSrLn sl : pCoOr.getServs()) {
            if (sl.getDt1() == null) {
                // Undated services are booked by quantity, mirroring goods above.
                List<SeServicePlace> sps = getSrvOrm().retrieveListWithConditions(pRqVs, SeServicePlace.class, "where ISALWAYS=0 and ITEM=" + sl.getService().getItsId());
                if (sps.size() != 0) {
                    BigDecimal avQu = BigDecimal.ZERO;
                    for (SeServicePlace sp : sps) {
                        avQu = avQu.add(sp.getItsQuantity());
                    }
                    if (avQu.compareTo(sl.getQuant()) == -1) {
                        // previous test should not be passed!!!
                        throw new Exception("AC. S.E.Service is not available #" + sl.getService().getItsId());
                    }
                    BigDecimal rst = sl.getQuant();
                    for (SeServicePlace sp : sps) {
                        if (rst.compareTo(BigDecimal.ZERO) == 0) {
                            break;
                        }
                        if (sp.getItsQuantity().compareTo(sl.getQuant()) == -1) {
                            rst = rst.subtract(sp.getItsQuantity());
                            sp.setItsQuantity(BigDecimal.ZERO);
                        } else {
                            sp.setItsQuantity(sp.getItsQuantity().subtract(rst));
                            rst = BigDecimal.ZERO;
                        }
                        // TODO PERFORM fastupd
                        getSrvOrm().updateEntity(pRqVs, sp);
                    }
                    // ITSTYPE=3 -> service rows in ITEMINLIST.
                    String wheTyId = "ITSTYPE=3 and ITEMID=" + sl.getService().getItsId();
                    if (this.fastLoc) {
                        cvsIil.put("itsVersion", new Date().getTime());
                        cvsIil.put("availableQuantity", "AVAILABLEQUANTITY-" + sl.getQuant());
                        this.srvDb.executeUpdate("ITEMINLIST", cvsIil, wheTyId);
                    } else {
                        pRqVs.put("fieldsNames", fnmIil);
                        List<ItemInList> iils = this.srvOrm.retrieveListWithConditions(pRqVs, ItemInList.class, "where " + wheTyId);
                        if (iils.size() == 1) {
                            BigDecimal aq = iils.get(0).getAvailableQuantity().subtract(sl.getQuant());
                            if (aq.compareTo(BigDecimal.ZERO) == -1) {
                                pRqVs.remove("fieldsNames");
                                throw new Exception("ItemInList NA avQuan SESERV: id/itId/avQua/quan: " + iils.get(0).getItsId() + "/" + sl.getService().getItsId() + "/" + iils.get(0).getAvailableQuantity() + "/" + sl.getQuant());
                            } else {
                                iils.get(0).setAvailableQuantity(aq);
                                getSrvOrm().updateEntity(pRqVs, iils.get(0));
                            }
                        } else {
                            pRqVs.remove("fieldsNames");
                            throw new Exception("ItemInList WC for SESERV: itId/count: " + sl.getService().getItsId() + "/" + iils.size());
                        }
                        pRqVs.remove("fieldsNames");
                    }
                }
            } else {
                tibs = true;
            }
        }
        pRqVs.remove(tbn + "itemdeepLevel");
        pRqVs.remove(tbn + "pickUpPlacedeepLevel");
        if (tibs) {
            // Book busy time ranges for dated services, recycling freed rows
            // (FRE=1) before inserting new SeSerBus records.
            tbn = SeSerBus.class.getSimpleName();
            Set<String> ndFl = new HashSet<String>();
            ndFl.add("itsId");
            ndFl.add("itsVersion");
            pRqVs.put(tbn + "neededFields", ndFl);
            List<SeSerBus> sbas = this.srvOrm.retrieveListWithConditions(pRqVs, SeSerBus.class, "where FRE=1");
            int i = 0;
            pRqVs.remove(tbn + "neededFields");
            for (CuOrSeSrLn sl : pCoOr.getServs()) {
                if (sl.getDt1() != null) {
                    SeSerBus sb;
                    if (i < sbas.size()) {
                        sb = sbas.get(i);
                        sb.setFre(false);
                    } else {
                        sb = new SeSerBus();
                    }
                    sb.setServ(sl.getService());
                    sb.setFrTm(sl.getDt1());
                    sb.setTiTm(sl.getDt2());
                    if (i < sbas.size()) {
                        getSrvOrm().updateEntity(pRqVs, sb);
                        i++;
                    } else {
                        getSrvOrm().insertEntity(pRqVs, sb);
                    }
                }
            }
        }
    }
}
public class Searcher { /** * Removes one or several faceted attributes for the next queries . * If the facet was added several times , you need to call this method several times too or use { @ link # deleteFacet } . * @ param attributes one or more attribute names . * @ return this { @ link Searcher } for chaining . */ @ SuppressWarnings ( { } }
"WeakerAccess" , "unused" } ) // For library users public Searcher removeFacet ( String ... attributes ) { for ( String attribute : attributes ) { final Integer value = facetRequestCount . get ( attribute ) ; if ( value == null ) { Log . e ( "Algolia|Searcher" , "removeFacet called for" + attribute + " which was not currently a facet." ) ; } else if ( value == 1 ) { facets . remove ( attribute ) ; facetRequestCount . put ( attribute , 0 ) ; } else { facetRequestCount . put ( attribute , value - 1 ) ; } } rebuildQueryFacets ( ) ; return this ;
public class AppServicePlansInner { /** * Get all App Service plans in a resource group . * Get all App Service plans in a resource group . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; AppServicePlanInner & gt ; object */ public Observable < Page < AppServicePlanInner > > listByResourceGroupAsync ( final String resourceGroupName ) { } }
return listByResourceGroupWithServiceResponseAsync ( resourceGroupName ) . map ( new Func1 < ServiceResponse < Page < AppServicePlanInner > > , Page < AppServicePlanInner > > ( ) { @ Override public Page < AppServicePlanInner > call ( ServiceResponse < Page < AppServicePlanInner > > response ) { return response . body ( ) ; } } ) ;
public class TagsUtils {
    /**
     * Parses a comma separated list of tag names, trimming surrounding whitespace.
     *
     * @param tagNames a comma separated list of tag names.
     * @return the list of tag names.
     */
    private static List<String> parseTagNames(String tagNames) {
        // Split on commas (consuming adjacent whitespace), then trim each token
        // to also strip leading/trailing whitespace of the whole string.
        String[] rawNames = tagNames.split("\\s*,\\s*");
        return Arrays.stream(rawNames).map(name -> name.trim()).collect(Collectors.toList());
    }
}
public class PBKDF2Realm { /** * Return the Principal associated with the specified username and * credentials , if one exists in the user data store ; otherwise return null . */ @ Override public Principal authenticate ( String username , String credentials ) { } }
GenericPrincipal principal = ( GenericPrincipal ) getPrincipal ( username ) ; if ( null != principal ) { try { if ( ! PasswordStorage . verifyPassword ( credentials , principal . getPassword ( ) ) ) { principal = null ; } } catch ( CannotPerformOperationException | InvalidHashException e ) { LOGR . log ( Level . WARNING , e . getMessage ( ) ) ; principal = null ; } } return principal ;
public class CSSMediaQuery {
    /**
     * Append a media expression to the list.
     *
     * @param aMediaExpression
     *        The media expression to be added. May not be <code>null</code>.
     * @return this
     * @throws NullPointerException if the expression is <code>null</code>
     */
    @Nonnull
    public CSSMediaQuery addMediaExpression(@Nonnull final CSSMediaExpression aMediaExpression) {
        // Reject null early so the internal list never holds null entries.
        ValueEnforcer.notNull(aMediaExpression, "MediaExpression");
        m_aMediaExpressions.add(aMediaExpression);
        // Fluent API: return this for chaining.
        return this;
    }
}
public class JMLambda {
    /**
     * Evaluates the supplier only when the flag is true and wraps its result.
     * A false flag — or a supplier returning null — yields an empty Optional.
     *
     * @param <R> the type parameter
     * @param bool the bool
     * @param supplier the supplier
     * @return the optional
     */
    public static <R> Optional<R> supplierIfTrue(boolean bool, Supplier<R> supplier) {
        if (!bool) {
            return Optional.empty();
        }
        // Only invoked on the true path; null results become Optional.empty().
        return Optional.ofNullable(supplier.get());
    }
}
public class EsperantoTagger { /** * " xx " for an unknown verb . */ private String findTransitivity ( String verb ) { } }
if ( verb . endsWith ( "iĝi" ) ) { return "nt" ; } else if ( verb . endsWith ( "igi" ) ) { // The verb " memmortigi is strange : even though it ends in - igi , it // is intransitive . return verb . equals ( "memmortigi" ) ? "nt" : "tr" ; } // This loop executes only once for most verbs ( or very few times ) . for ( ; ; ) { boolean isTransitive = setTransitiveVerbs . contains ( verb ) ; boolean isIntransitive = setIntransitiveVerbs . contains ( verb ) ; if ( isTransitive ) { return isIntransitive ? "tn" : "tr" ; } else if ( isIntransitive ) { return "nt" ; } // Verb is not explicitly listed as transitive or intransitive . // Try to remove a prefix mal - , ek - , re - , mis - fi - or // suffix - ad , - aĉ , - et , - eg since those never alter // transitivity . Then look up verb again in case we find // a verb with a known transitivity . For example , given a verb // " malŝategi " , we will probe " malŝategi " , " ŝategi " " ŝati " // and then finally find out that " ŝati " is transitive . Matcher matcherPrefix = patternPrefix . matcher ( verb ) ; if ( matcherPrefix . find ( ) ) { // Remove a prefix and try again . verb = matcherPrefix . group ( 1 ) ; continue ; } Matcher matcherSuffix = patternSuffix . matcher ( verb ) ; if ( matcherSuffix . find ( ) ) { // Remove a suffix and try again . verb = matcherSuffix . group ( 1 ) + "i" ; continue ; } break ; } return "xx" ; // Unknown transitivity .
public class SystemUtil {
    /**
     * Returns the total number of active threads in the JVM, counted from the
     * root thread group. Note that activeCount() is an estimate by contract.
     *
     * @return total active thread count
     */
    public static int getTotalThreadCount() {
        // Climb to the root thread group, which spans all threads in the JVM.
        ThreadGroup root = Thread.currentThread().getThreadGroup();
        ThreadGroup parent;
        while ((parent = root.getParent()) != null) {
            root = parent;
        }
        return root.activeCount();
    }
}
public class EntityFinder { /** * Utility method that employs some heuristics to find the { @ link EntityInfo } s * for the polymeric chains given in constructor . * To be used in case the information is missing in PDB / mmCIF file * @ return */ public static List < EntityInfo > findPolyEntities ( List < List < Chain > > polyModels ) { } }
TreeMap < String , EntityInfo > chainIds2entities = findEntitiesFromAlignment ( polyModels ) ; List < EntityInfo > entities = findUniqueEntities ( chainIds2entities ) ; return entities ;
public class StandardThreadExecutor { /** * 线程池内异常处理 * @ param r * @ param t */ private void printException ( Runnable r , Throwable t ) { } }
if ( t == null && r instanceof Future < ? > ) { try { Future < ? > future = ( Future < ? > ) r ; if ( future . isDone ( ) ) future . get ( ) ; } catch ( CancellationException ce ) { t = ce ; } catch ( ExecutionException ee ) { t = ee . getCause ( ) ; } catch ( InterruptedException ie ) { Thread . currentThread ( ) . interrupt ( ) ; // ignore / reset } } if ( t != null ) logger . error ( t . getMessage ( ) , t ) ;
public class TrainingsImpl {
    /**
     * Delete a set of image regions.
     *
     * @param projectId The project id
     * @param regionIds Regions to delete. Limited to 64
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteImageRegionsAsync(UUID projectId, List<String> regionIds, final ServiceCallback<Void> serviceCallback) {
        // Adapt the reactive service-response call to the callback-based
        // ServiceFuture API; the callback fires when the response arrives.
        return ServiceFuture.fromResponse(deleteImageRegionsWithServiceResponseAsync(projectId, regionIds), serviceCallback);
    }
}