signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LocationRangeSet { /** * Reduces a set of LocationRange objects . */ public static SortedSet reduce ( SortedSet locations ) { } }
SortedSet newSet = new TreeSet ( ) ; Iterator it = locations . iterator ( ) ; while ( it . hasNext ( ) ) { LocationRange next = ( LocationRange ) it . next ( ) ; if ( newSet . size ( ) == 0 ) { newSet . add ( next ) ; continue ; } if ( next . getStartLocation ( ) . compareTo ( next . getEndLocation ( ) ) >= 0 ) { continue ; } // Try to reduce the set by joining adjacent ranges or eliminating // overlap . LocationRange last = ( LocationRange ) newSet . last ( ) ; if ( next . getStartLocation ( ) . compareTo ( last . getEndLocation ( ) ) <= 0 ) { if ( last . getEndLocation ( ) . compareTo ( next . getEndLocation ( ) ) <= 0 ) { newSet . remove ( last ) ; newSet . add ( new LocationRangeImpl ( last , next ) ) ; } continue ; } newSet . add ( next ) ; } return newSet ;
public class NodeObject { /** * Requests that the < code > cacheData < / code > field be set to the * specified value . The local value will be updated immediately and an * event will be propagated through the system to notify all listeners * that the attribute did change . Proxied copies of this object ( on * clients ) will apply the value change when they received the * attribute changed notification . */ @ Generated ( value = { } }
"com.threerings.presents.tools.GenDObjectTask" } ) public void setCacheData ( NodeObject . CacheData value ) { NodeObject . CacheData ovalue = this . cacheData ; requestAttributeChange ( CACHE_DATA , value , ovalue ) ; this . cacheData = value ;
public class Engine { /** * Installs Feature while skipping dependency check * @ param installAsset InstallAsset to install * @ param filesInstalled List of files to be installed * @ param featuresToBeInstalled Collection of feature names to install * @ param existsAction Action to take if asset exists * @ param executableFiles Set of executable file names * @ param extattrFiles Extendible attribute files as a set * @ param downloadDependencies If dependencies should be downloaded * @ param proxy RestRepositoryConnectionProxy to connect to * @ param checksumsManager ChecksumsManager for installed files * @ throws IOException * @ throws InstallException */ public void installFeatureNoDependencyCheck ( InstallAsset installAsset , List < File > filesInstalled , Collection < String > featuresToBeInstalled , ExistsAction existsAction , Set < String > executableFiles , Map < String , Set < String > > extattrFiles , boolean downloadDependencies , RestRepositoryConnectionProxy proxy , ChecksumsManager checksumsManager ) throws IOException , InstallException { } }
ESAAdaptor . install ( product , ( ESAAsset ) installAsset , filesInstalled , featuresToBeInstalled , existsAction , executableFiles , extattrFiles , checksumsManager , true ) ;
public class SelectSubqueryExpression { /** * Resolve the subquery ' s correlated TVEs ( and , in one special case , aggregates ) * that became ParameterValueExpressions in the subquery statement ( or its children ) . * If they reference a column from the parent statement ( getOrigStmtId ( ) = = parentStmt . m _ stmtId ) * that PVE will have to be initialized by this subquery expression in the back - end executor . * Otherwise , the TVE references a grandparent statement with its own subquery expression , * so just add it to the parent statement ' s set of correlated TVEs needing to be resolved later * at a higher level . */ public void resolveCorrelations ( ) { } }
AbstractParsedStmt subqueryStmt = m_subquery . getSubqueryStmt ( ) ; AbstractParsedStmt parentStmt = subqueryStmt . m_parentStmt ; // we must have a parent - it ' s a subquery statement assert ( parentStmt != null ) ; // Preserve indexes of all parameters this subquery depends on . // It might include parameters from its nested child subqueries that // the subquery statement could not resolve itself and had to " move up " . m_allParameterIdxList . addAll ( subqueryStmt . m_parameterTveMap . keySet ( ) ) ; for ( Map . Entry < Integer , AbstractExpression > entry : subqueryStmt . m_parameterTveMap . entrySet ( ) ) { Integer paramIdx = entry . getKey ( ) ; AbstractExpression expr = entry . getValue ( ) ; if ( expr instanceof TupleValueExpression ) { TupleValueExpression tve = ( TupleValueExpression ) expr ; if ( tve . getOrigStmtId ( ) == parentStmt . getStmtId ( ) ) { // TVE originates from the statement that this SubqueryExpression belongs to addArgumentParameter ( paramIdx , expr ) ; } else { // TVE originates from a statement above this parent . Move it up . parentStmt . m_parameterTveMap . put ( paramIdx , expr ) ; } } else if ( expr instanceof AggregateExpression ) { // An aggregate expression is always from THIS parent statement . addArgumentParameter ( paramIdx , expr ) ; } else { // so far it should be either AggregateExpression or TupleValueExpression types assert ( false ) ; } } subqueryStmt . m_parameterTveMap . clear ( ) ;
public class MCDRGImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public void eSet ( int featureID , Object newValue ) { } }
switch ( featureID ) { case AfplibPackage . MCDRG__RG_LENGTH : setRGLength ( ( Integer ) newValue ) ; return ; case AfplibPackage . MCDRG__TRIPLETS : getTriplets ( ) . clear ( ) ; getTriplets ( ) . addAll ( ( Collection < ? extends Triplet > ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
public class nsip { /** * Use this API to enable nsip . */ public static base_response enable ( nitro_service client , nsip resource ) throws Exception { } }
nsip enableresource = new nsip ( ) ; enableresource . ipaddress = resource . ipaddress ; enableresource . td = resource . td ; return enableresource . perform_operation ( client , "enable" ) ;
public class CommonMatchers { /** * = = > ARRAY = = > */ public static Matcher < JsonElement > areItemsValid ( final Validator validator ) { } }
return new TypeSafeDiagnosingMatcher < JsonElement > ( ) { @ Override protected boolean matchesSafely ( JsonElement item , Description mismatchDescription ) { // we do not care for the properties if parent item is not JsonArray if ( ! item . isJsonArray ( ) ) return true ; for ( int i = 0 ; i < item . asJsonArray ( ) . length ( ) ; i ++ ) { StringBuilder sb = new StringBuilder ( ) ; if ( ! validator . validate ( item . asJsonArray ( ) . opt ( i ) , sb ) ) { mismatchDescription . appendText ( "item at pos: " + i + ", does not validate by validator " + validator . getTitle ( ) ) . appendText ( "\nDetails: " ) . appendText ( sb . toString ( ) ) ; return false ; } } return true ; } @ Override public void describeTo ( Description description ) { description . appendText ( "are array items valid" ) ; } } ;
public class SingleIndexWriter { /** * Generate single index file , for all Unicode characters . * @ param configuration the configuration for this doclet * @ param indexbuilder IndexBuilder built by { @ link IndexBuilder } * @ throws DocFileIOException if there is a problem generating the index */ public static void generate ( ConfigurationImpl configuration , IndexBuilder indexbuilder ) throws DocFileIOException { } }
DocPath filename = DocPaths . INDEX_ALL ; SingleIndexWriter indexgen = new SingleIndexWriter ( configuration , filename , indexbuilder ) ; indexgen . generateIndexFile ( ) ;
public class AWSServiceCatalogClient { /** * Lists the specified requests or all performed requests . * @ param listRecordHistoryRequest * @ return Result of the ListRecordHistory operation returned by the service . * @ throws InvalidParametersException * One or more parameters provided to the operation are not valid . * @ sample AWSServiceCatalog . ListRecordHistory * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / servicecatalog - 2015-12-10 / ListRecordHistory " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ListRecordHistoryResult listRecordHistory ( ListRecordHistoryRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListRecordHistory ( request ) ;
public class IOManagerAsync { /** * Close method . Shuts down the reader and writer threads immediately , not waiting for their * pending requests to be served . This method waits until the threads have actually ceased their * operation . */ @ Override public void shutdown ( ) { } }
// mark shut down and exit if it already was shut down if ( ! isShutdown . compareAndSet ( false , true ) ) { return ; } // Remove shutdown hook to prevent resource leaks ShutdownHookUtil . removeShutdownHook ( shutdownHook , getClass ( ) . getSimpleName ( ) , LOG ) ; try { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Shutting down I/O manager." ) ; } // close writing and reading threads with best effort and log problems // first notify all to close , then wait until all are closed for ( WriterThread wt : writers ) { try { wt . shutdown ( ) ; } catch ( Throwable t ) { LOG . error ( "Error while shutting down IO Manager writer thread." , t ) ; } } for ( ReaderThread rt : readers ) { try { rt . shutdown ( ) ; } catch ( Throwable t ) { LOG . error ( "Error while shutting down IO Manager reader thread." , t ) ; } } try { for ( WriterThread wt : writers ) { wt . join ( ) ; } for ( ReaderThread rt : readers ) { rt . join ( ) ; } } catch ( InterruptedException iex ) { // ignore this on shutdown } } finally { // make sure we call the super implementation in any case and at the last point , // because this will clean up the I / O directories super . shutdown ( ) ; }
public class JSONParserBase { /** * use to return Primitive Type , or String , Or JsonObject or JsonArray * generated by a ContainerFactory */ protected < T > T parse ( JsonReaderI < T > mapper ) throws ParseException { } }
this . pos = - 1 ; T result ; try { read ( ) ; result = readFirst ( mapper ) ; if ( checkTaillingData ) { if ( ! checkTaillingSpace ) skipSpace ( ) ; if ( c != EOI ) throw new ParseException ( pos - 1 , ERROR_UNEXPECTED_TOKEN , c ) ; } } catch ( IOException e ) { throw new ParseException ( pos , e ) ; } xs = null ; xo = null ; return result ;
public class FixedFatJarExportPage { /** * TODO : Replace " private " by " protected " within JDT */ protected IPath [ ] getClasspath ( ILaunchConfiguration configuration ) throws CoreException { } }
IRuntimeClasspathEntry [ ] entries = JavaRuntime . computeUnresolvedRuntimeClasspath ( configuration ) ; entries = JavaRuntime . resolveRuntimeClasspath ( entries , configuration ) ; boolean isModularConfig = JavaRuntime . isModularConfiguration ( configuration ) ; ArrayList < IPath > userEntries = new ArrayList < > ( entries . length ) ; for ( int i = 0 ; i < entries . length ; i ++ ) { int classPathProperty = entries [ i ] . getClasspathProperty ( ) ; if ( ( ! isModularConfig && classPathProperty == IRuntimeClasspathEntry . USER_CLASSES ) || ( isModularConfig && ( classPathProperty == IRuntimeClasspathEntry . CLASS_PATH || classPathProperty == IRuntimeClasspathEntry . MODULE_PATH ) ) ) { String location = entries [ i ] . getLocation ( ) ; if ( location != null ) { IPath entry = Path . fromOSString ( location ) ; if ( ! userEntries . contains ( entry ) ) { userEntries . add ( entry ) ; } } } } return userEntries . toArray ( new IPath [ userEntries . size ( ) ] ) ;
public class ManagedInstancesInner { /** * Creates or updates a managed instance . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param managedInstanceName The name of the managed instance . * @ param parameters The requested managed instance resource state . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ManagedInstanceInner object */ public Observable < ManagedInstanceInner > beginCreateOrUpdateAsync ( String resourceGroupName , String managedInstanceName , ManagedInstanceInner parameters ) { } }
return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , managedInstanceName , parameters ) . map ( new Func1 < ServiceResponse < ManagedInstanceInner > , ManagedInstanceInner > ( ) { @ Override public ManagedInstanceInner call ( ServiceResponse < ManagedInstanceInner > response ) { return response . body ( ) ; } } ) ;
public class CmsSearch { /** * Limits the search to a given list of resource types only . < p > * @ param resourceTypes the resource types to limit the search result to */ public void setResourceTypes ( String [ ] resourceTypes ) { } }
if ( resourceTypes != null ) { m_parameters . setResourceTypes ( Arrays . asList ( resourceTypes ) ) ; } else { m_parameters . setResourceTypes ( null ) ; } resetLastResult ( ) ;
public class Compiler {

    /**
     * Add an additional compilation unit into the loop:
     * build compilation unit declarations, their bindings and record their
     * results.
     */
    @Override
    public void accept(ICompilationUnit sourceUnit, AccessRestriction accessRestriction) {
        // Switch the current policy and compilation result for this unit to
        // the requested one.
        CompilationResult unitResult = new CompilationResult(
                sourceUnit, this.totalUnits, this.totalUnits,
                this.options.maxProblemsPerUnit);
        unitResult.checkSecondaryTypes = true;
        try {
            if (this.options.verbose) {
                String count = String.valueOf(this.totalUnits + 1);
                this.out.println(Messages.bind(Messages.compilation_request,
                        new String[] { count, count, new String(sourceUnit.getFileName()) }));
            }
            // Use diet parsing for large collections of units.
            CompilationUnitDeclaration parsedUnit;
            if (this.totalUnits < this.parseThreshold) {
                parsedUnit = this.parser.parse(sourceUnit, unitResult);
            } else {
                parsedUnit = this.parser.dietParse(sourceUnit, unitResult);
            }
            // Initial type binding creation.
            this.lookupEnvironment.buildTypeBindings(parsedUnit, accessRestriction);
            addCompilationUnit(sourceUnit, parsedUnit);
            // Binding resolution.
            this.lookupEnvironment.completeTypeBindings(parsedUnit);
        } catch (AbortCompilationUnit e) {
            // At this point, currentCompilationUnitResult may not be
            // sourceUnit, but some other one requested further along to
            // resolve sourceUnit.
            if (unitResult.compilationUnit == sourceUnit) {
                // Only report once.
                this.requestor.acceptResult(unitResult.tagAsAccepted());
            } else {
                // Abort the enclosing request to compile.
                throw e;
            }
        }
    }
}
public class AppServicePlansInner { /** * Get all apps associated with an App Service plan . * Get all apps associated with an App Service plan . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param name Name of the App Service plan . * @ param skipToken Skip to a web app in the list of webapps associated with app service plan . If specified , the resulting list will contain web apps starting from ( including ) the skipToken . Otherwise , the resulting list contains web apps from the start of the list * @ param filter Supported filter : $ filter = state eq running . Returns only web apps that are currently running * @ param top List page size . If specified , results are paged . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; SiteInner & gt ; object */ public Observable < ServiceResponse < Page < SiteInner > > > listWebAppsWithServiceResponseAsync ( final String resourceGroupName , final String name , final String skipToken , final String filter , final String top ) { } }
return listWebAppsSinglePageAsync ( resourceGroupName , name , skipToken , filter , top ) . concatMap ( new Func1 < ServiceResponse < Page < SiteInner > > , Observable < ServiceResponse < Page < SiteInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < SiteInner > > > call ( ServiceResponse < Page < SiteInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listWebAppsNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ;
public class Serializer { /** * Writes an object to the given output stream . * The given object must have a { @ link Serializer # register ( Class ) registered } serializer or implement { @ link java . io . Serializable } . * If a serializable type ID was provided during registration , the type ID will be written to the given * { @ link Buffer } in lieu of the class name . Types with no associated type ID will be written * to the buffer with a full class name for reference during serialization . * Types that implement { @ link CatalystSerializable } will be serialized via * { @ link CatalystSerializable # writeObject ( BufferOutput , Serializer ) } unless a * { @ link TypeSerializer } was explicitly registered for the type . * Types that implement { @ link java . io . Serializable } will be serialized using Java ' s { @ link java . io . ObjectOutputStream } . * Types that implement { @ link java . io . Externalizable } will be serialized via that interface ' s methods unless a custom * { @ link TypeSerializer } has been registered for the type . { @ link java . io . Externalizable } types can , * however , still take advantage of faster serialization of type IDs . * @ param object The object to write . * @ param outputStream The output stream to which to write the object . * @ param < T > The object type . * @ return The serialized object . * @ throws SerializationException If no serializer is registered for the object . * @ see Serializer # writeObject ( Object ) */ public < T > OutputStream writeObject ( T object , OutputStream outputStream ) { } }
writeObject ( object , new OutputStreamBufferOutput ( outputStream ) ) ; return outputStream ;
public class DirFileEntryEnumIterator2 { /** * { @ inheritDoc } * @ see jcifs . smb . DirFileEntryEnumIteratorBase # getResults ( ) */ @ Override protected FileEntry [ ] getResults ( ) { } }
FileEntry [ ] results = this . response . getResults ( ) ; if ( results == null ) { return new FileEntry [ 0 ] ; } return results ;
public class ModeShapeEngine { /** * Get the deployed { @ link Repository } instance with the given the name . * @ param repositoryName the name of the deployed repository * @ return the named repository instance * @ throws IllegalArgumentException if the repository name is null , blank or invalid * @ throws NoSuchRepositoryException if there is no repository with the specified name * @ throws IllegalStateException if this engine is not { @ link # getState ( ) running } * @ see # deploy ( RepositoryConfiguration ) * @ see # undeploy ( String ) */ @ Override public final JcrRepository getRepository ( String repositoryName ) throws NoSuchRepositoryException { } }
CheckArg . isNotEmpty ( repositoryName , "repositoryName" ) ; checkRunning ( ) ; final Lock lock = this . lock . readLock ( ) ; try { lock . lock ( ) ; JcrRepository repository = repositories . get ( repositoryName ) ; if ( repository == null ) { throw new NoSuchRepositoryException ( JcrI18n . repositoryDoesNotExist . text ( repositoryName ) ) ; } return repository ; } finally { lock . unlock ( ) ; }
public class CSVPrinter { /** * Put a comment among the comma separated values . * Comments will always begin on a new line and occupy a * least one full line . The character specified to star * comments and a space will be inserted at the beginning of * each new line in the comment . * @ param comment the comment to output */ public void printlnComment ( String comment ) { } }
if ( this . strategy . isCommentingDisabled ( ) ) { return ; } if ( ! newLine ) { out . println ( ) ; } out . print ( this . strategy . getCommentStart ( ) ) ; out . print ( ' ' ) ; for ( int i = 0 ; i < comment . length ( ) ; i ++ ) { char c = comment . charAt ( i ) ; switch ( c ) { case '\r' : if ( i + 1 < comment . length ( ) && comment . charAt ( i + 1 ) == '\n' ) { i ++ ; } // break intentionally excluded . case '\n' : out . println ( ) ; out . print ( this . strategy . getCommentStart ( ) ) ; out . print ( ' ' ) ; break ; default : out . print ( c ) ; break ; } } out . println ( ) ; out . flush ( ) ; newLine = true ;
public class BigDecimal { /** * Tests if quotient has to be incremented according the roundingMode */ private static boolean needIncrement ( MutableBigInteger mdivisor , int roundingMode , int qsign , MutableBigInteger mq , MutableBigInteger mr ) { } }
assert ! mr . isZero ( ) ; int cmpFracHalf = mr . compareHalf ( mdivisor ) ; return commonNeedIncrement ( roundingMode , qsign , cmpFracHalf , mq . isOdd ( ) ) ;
public class EndpointUtil { /** * This method provides a decoding of a client based URI . * @ param clientUri The client URI * @ return The original URI */ public static String decodeClientURI ( String clientUri ) { } }
return clientUri . startsWith ( Constants . URI_CLIENT_PREFIX ) ? clientUri . substring ( Constants . URI_CLIENT_PREFIX . length ( ) ) : clientUri ;
public class AbstractCompact { /** * Return a copy of the provided array with updated memory layout . * @ param oldStorage * the current array * @ param defaultValue * default value for newly allocated array positions * @ param payload * the payload object * @ return a copy of the provided array with updated memory layout . * @ see # updateTransitionStorage ( Object [ ] , Object , Payload ) * @ see # updateTransitionStorage ( Object [ ] , IntFunction , Object , Payload ) */ protected final int [ ] updateTransitionStorage ( int [ ] oldStorage , int defaultValue , Payload payload ) { } }
return payload . type . updateStorage ( oldStorage , payload , int [ ] :: new , ( arr , idx ) -> arr [ idx ] = defaultValue ) ;
public class ServletUtil { /** * 获取客户端IP * headerNames参数用于自定义检测的Header < br > * 需要注意的是 , 使用此方法获取的客户IP地址必须在Http服务器 ( 例如Nginx ) 中配置头信息 , 否则容易造成IP伪造 。 * @ param request 请求对象 { @ link HttpServletRequest } * @ param headerNames 自定义头 , 通常在Http服务器 ( 例如Nginx ) 中配置 * @ return IP地址 * @ since 4.4.1 */ public static String getClientIPByHeader ( HttpServletRequest request , String ... headerNames ) { } }
String ip ; for ( String header : headerNames ) { ip = request . getHeader ( header ) ; if ( false == isUnknow ( ip ) ) { return getMultistageReverseProxyIp ( ip ) ; } } ip = request . getRemoteAddr ( ) ; return getMultistageReverseProxyIp ( ip ) ;
public class xen_upgrade { /** * < pre > * Use this operation to get version of xenserver . * < / pre > */ public static xen_upgrade [ ] get ( nitro_service client ) throws Exception { } }
xen_upgrade resource = new xen_upgrade ( ) ; resource . validate ( "get" ) ; return ( xen_upgrade [ ] ) resource . get_resources ( client ) ;
public class ToUnknownStream { /** * Adds an attribute to the currenly open tag * @ param uri the URI of a namespace * @ param localName the attribute name , without prefix * @ param rawName the attribute name , with prefix ( if any ) * @ param type the type of the attribute , typically " CDATA " * @ param value the value of the parameter * @ param XSLAttribute true if this attribute is coming from an xsl : attribute element * @ see ExtendedContentHandler # addAttribute ( String , String , String , String , String ) */ public void addAttribute ( String uri , String localName , String rawName , String type , String value , boolean XSLAttribute ) throws SAXException { } }
if ( m_firstTagNotEmitted ) { flush ( ) ; } m_handler . addAttribute ( uri , localName , rawName , type , value , XSLAttribute ) ;
public class AmazonCloudFrontClient { /** * Lists invalidation batches . * @ param listInvalidationsRequest * The request to list invalidations . * @ return Result of the ListInvalidations operation returned by the service . * @ throws InvalidArgumentException * The argument is invalid . * @ throws NoSuchDistributionException * The specified distribution does not exist . * @ throws AccessDeniedException * Access denied . * @ sample AmazonCloudFront . ListInvalidations * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudfront - 2018-11-05 / ListInvalidations " target = " _ top " > AWS * API Documentation < / a > */ @ Override public ListInvalidationsResult listInvalidations ( ListInvalidationsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListInvalidations ( request ) ;
public class AutoMlClient { /** * Deploys a model . If a model is already deployed , deploying it with the same parameters has no * effect . Deploying with different parametrs ( as e . g . changing * < p > [ node _ number ] [ google . cloud . automl . v1beta1 . ImageObjectDetectionModelDeploymentMetadata . node _ number ] * ) will update the deployment without pausing the model ' s availability . * < p > Only applicable for Text Classification , Image Object Detection and Tables ; all other * domains manage deployment automatically . * < p > Returns an empty response in the [ response ] [ google . longrunning . Operation . response ] field * when it completes . * < p > Sample code : * < pre > < code > * try ( AutoMlClient autoMlClient = AutoMlClient . create ( ) ) { * ModelName name = ModelName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ MODEL ] " ) ; * DeployModelRequest request = DeployModelRequest . newBuilder ( ) * . setName ( name . toString ( ) ) * . build ( ) ; * autoMlClient . deployModelAsync ( request ) . get ( ) ; * < / code > < / pre > * @ param request The request object containing all of the parameters for the API call . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Empty , OperationMetadata > deployModelAsync ( DeployModelRequest request ) { } }
return deployModelOperationCallable ( ) . futureCall ( request ) ;
public class TriggerUpdater { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
if ( callbackMethod != null ) { request . addPostParam ( "CallbackMethod" , callbackMethod . toString ( ) ) ; } if ( callbackUrl != null ) { request . addPostParam ( "CallbackUrl" , callbackUrl . toString ( ) ) ; } if ( friendlyName != null ) { request . addPostParam ( "FriendlyName" , friendlyName ) ; }
public class LLongSupplierBuilder { /** * One of ways of creating builder . In most cases ( considering all _ functional _ builders ) it requires to provide generic parameters ( in most cases redundantly ) */ @ Nonnull public final LLongSupplierBuilder withHandling ( @ Nonnull HandlingInstructions < RuntimeException , RuntimeException > handling ) { } }
Null . nonNullArg ( handling , "handling" ) ; if ( this . handling != null ) { throw new UnsupportedOperationException ( "Handling is already set for this builder." ) ; } this . handling = handling ; return self ( ) ;
public class RequestUtil {

    /**
     * Filter the specified message string for characters that are sensitive
     * in HTML (&lt;, &gt;, &amp;, &quot;). This avoids potential attacks
     * caused by including JavaScript code in request URLs that are often
     * echoed back in error messages.
     *
     * @param message the message string to be filtered
     * @return the filtered message, or null if the input was null
     */
    public static String filter(String message) {
        if (message == null) {
            return null;
        }
        // Pre-size with slack for a few escape expansions.
        StringBuilder escaped = new StringBuilder(message.length() + 50);
        for (int i = 0; i < message.length(); i++) {
            char ch = message.charAt(i);
            switch (ch) {
                case '<':
                    escaped.append("&lt;");
                    break;
                case '>':
                    escaped.append("&gt;");
                    break;
                case '&':
                    escaped.append("&amp;");
                    break;
                case '"':
                    escaped.append("&quot;");
                    break;
                default:
                    escaped.append(ch);
            }
        }
        return escaped.toString();
    }
}
public class ParsedElement { /** * Just like makeDouble ( ) , but creates a double primitive value instead of a * Double object . Much more efficient if you don ' t need the object . * @ param obj Any double convertible object * @ return The double primitive value . */ public static double makeDoubleValue ( Object obj ) { } }
if ( obj == null ) { return Double . NaN ; } return CommonServices . getCoercionManager ( ) . makePrimitiveDoubleFrom ( obj ) ;
public class LogView { /** * Removes a filter to the view . * @ param filter */ public void removeFilter ( AbstractLogFilter < E > filter ) { } }
Validate . notNull ( filter ) ; filters . remove ( filter ) ; uptodate = false ; filter . deleteObserver ( this ) ;
public class PathType { /** * Creates a URI for the path with the given root and names in the file system with the given URI . */ public final URI toUri ( URI fileSystemUri , String root , Iterable < String > names , boolean directory ) { } }
String path = toUriPath ( root , names , directory ) ; try { // it should not suck this much to create a new URI that ' s the same except with a path set = ( // need to do it this way for automatic path escaping return new URI ( fileSystemUri . getScheme ( ) , fileSystemUri . getUserInfo ( ) , fileSystemUri . getHost ( ) , fileSystemUri . getPort ( ) , path , null , null ) ; } catch ( URISyntaxException e ) { throw new AssertionError ( e ) ; }
public class DefaultRounding { /** * ( non - Javadoc ) * @ see javax . money . MonetaryFunction # apply ( java . lang . Object ) */ @ Override public MonetaryAmount apply ( MonetaryAmount amount ) { } }
return amount . getFactory ( ) . setCurrency ( amount . getCurrency ( ) ) . setNumber ( amount . getNumber ( ) . numberValue ( BigDecimal . class ) . setScale ( this . context . getInt ( SCALE_KEY ) , this . context . get ( RoundingMode . class ) ) ) . create ( ) ;
public class PolicyDescription { /** * The policy attributes . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPolicyAttributeDescriptions ( java . util . Collection ) } or * { @ link # withPolicyAttributeDescriptions ( java . util . Collection ) } if you want to override the existing values . * @ param policyAttributeDescriptions * The policy attributes . * @ return Returns a reference to this object so that method calls can be chained together . */ public PolicyDescription withPolicyAttributeDescriptions ( PolicyAttributeDescription ... policyAttributeDescriptions ) { } }
if ( this . policyAttributeDescriptions == null ) { setPolicyAttributeDescriptions ( new com . amazonaws . internal . SdkInternalList < PolicyAttributeDescription > ( policyAttributeDescriptions . length ) ) ; } for ( PolicyAttributeDescription ele : policyAttributeDescriptions ) { this . policyAttributeDescriptions . add ( ele ) ; } return this ;
public class ConvertUtil { /** * Converts value to Integer if it can . If value is an Integer , it is returned , if it is a Number , it is * promoted to Integer and then returned , in all other cases , it converts the value to String , * then tries to parse Integer from it . * @ param value value to be converted to Integer . * @ return value converted to Integer . * @ throws ConversionException if failing to do the conversion */ public static Integer toInteger ( Object value ) throws ConversionException { } }
if ( value == null ) { return null ; } else if ( value instanceof Number ) { return ( ( Number ) value ) . intValue ( ) ; } else { NumberFormat nf = new DecimalFormat ( ) ; try { return nf . parse ( value . toString ( ) ) . intValue ( ) ; } catch ( ParseException e ) { throw new ConversionException ( "failed to convert: '" + value + "' to Integer" , e ) ; } }
public class HttpUtils { /** * Build error message from connection in case of failure * @ param connection HttpURLConnection * @ return String by combining response code , message and error stream * @ throws IOException an IO exception */ public static String buildHttpErrorMessage ( final HttpURLConnection connection ) throws IOException { } }
final StringBuilder messageBuilder = new StringBuilder ( "(" ) . append ( connection . getResponseCode ( ) ) . append ( ")" ) ; if ( connection . getResponseMessage ( ) != null ) { messageBuilder . append ( " " ) ; messageBuilder . append ( connection . getResponseMessage ( ) ) ; } try ( final InputStreamReader isr = new InputStreamReader ( connection . getErrorStream ( ) , StandardCharsets . UTF_8 ) ; BufferedReader br = new BufferedReader ( isr ) ) { String output ; messageBuilder . append ( "[" ) ; while ( ( output = br . readLine ( ) ) != null ) { messageBuilder . append ( output ) ; } messageBuilder . append ( "]" ) ; } finally { connection . disconnect ( ) ; } return messageBuilder . toString ( ) ;
public class ScheduleManager { /** * only do this when using external runner */ private synchronized void updateLocal ( ) throws ScheduleManagerException { } }
final List < Schedule > updates = this . loader . loadUpdatedSchedules ( ) ; for ( final Schedule s : updates ) { if ( s . getStatus ( ) . equals ( TriggerStatus . EXPIRED . toString ( ) ) ) { onScheduleExpire ( s ) ; } else { internalSchedule ( s ) ; } }
public class HeaderLinksResponseEntity { /** * Wraps the given { @ link HttpEntity } into a { @ link HeaderLinksResponseEntity } . Will default the status code to * { @ link HttpStatus # OK } if the given value is not a { @ link ResponseEntity } . * @ param entity must not be { @ literal null } . * @ return */ public static < S extends RepresentationModel < ? > > HeaderLinksResponseEntity < S > wrap ( HttpEntity < S > entity ) { } }
Assert . notNull ( entity , "Given HttpEntity must not be null!" ) ; if ( entity instanceof ResponseEntity ) { return new HeaderLinksResponseEntity < > ( ( ResponseEntity < S > ) entity ) ; } else { return new HeaderLinksResponseEntity < > ( entity ) ; }
public class UpdateSketch { /** * Wrap takes the sketch image in Memory and refers to it directly . There is no data copying onto * the java heap . Only " Direct " Serialization Version 3 ( i . e , OpenSource ) sketches that have * been explicitly stored as direct objects can be wrapped . * An attempt to " wrap " earlier version sketches will result in a " heapified " , normal * Java Heap version of the sketch where all data will be copied to the heap . * @ param srcMem an image of a Sketch where the image seed hash matches the given seed hash . * < a href = " { @ docRoot } / resources / dictionary . html # mem " > See Memory < / a > * @ param seed < a href = " { @ docRoot } / resources / dictionary . html # seed " > See Update Hash Seed < / a > . * Compact sketches store a 16 - bit hash of the seed , but not the seed itself . * @ return a UpdateSketch backed by the given Memory */ public static UpdateSketch wrap ( final WritableMemory srcMem , final long seed ) { } }
final int preLongs = srcMem . getByte ( PREAMBLE_LONGS_BYTE ) & 0X3F ; final int serVer = srcMem . getByte ( SER_VER_BYTE ) & 0XFF ; final int familyID = srcMem . getByte ( FAMILY_BYTE ) & 0XFF ; final Family family = Family . idToFamily ( familyID ) ; if ( family != Family . QUICKSELECT ) { throw new SketchesArgumentException ( "A " + family + " sketch cannot be wrapped as an UpdateSketch." ) ; } if ( ( serVer == 3 ) && ( preLongs == 3 ) ) { return DirectQuickSelectSketch . writableWrap ( srcMem , seed ) ; } else { throw new SketchesArgumentException ( "Corrupted: An UpdateSketch image: must have SerVer = 3 and preLongs = 3" ) ; }
public class RESTServlet { /** * Split the given k = v ( or just k ) param into a Pair object . */ private Pair < String , String > extractParam ( String part ) { } }
int eqInx = part . indexOf ( '=' ) ; String paramName ; String paramValue ; if ( eqInx < 0 ) { paramName = part ; paramValue = null ; } else { paramName = part . substring ( 0 , eqInx ) ; paramValue = part . substring ( eqInx + 1 ) ; } return Pair . create ( paramName , paramValue ) ;
public class StringUtils {
    /**
     * Generates a random string of a given length.
     *
     * @param length The length of the string
     * @param min The min character in the string
     * @param max The max character in the string
     * @return A string of random characters
     */
    public static String randomString(int length, int min, int max) {
        // Delegate to the filtered overload, accepting every character in [min, max].
        return randomString(length, min, max, CharMatcher.ANY);
    }
}
public class EntityREST { /** * Delete an entity identified by its type and unique attributes . * @ param typeName - entity type to be deleted * @ param servletRequest - request containing unique attributes / values * @ return EntityMutationResponse */ @ DELETE @ Consumes ( Servlets . JSON_MEDIA_TYPE ) @ Produces ( Servlets . JSON_MEDIA_TYPE ) @ Path ( "/uniqueAttribute/type/{typeName}" ) public EntityMutationResponse deleteByUniqueAttribute ( @ PathParam ( "typeName" ) String typeName , @ Context HttpServletRequest servletRequest ) throws AtlasBaseException { } }
AtlasPerfTracer perf = null ; try { Map < String , Object > attributes = getAttributes ( servletRequest ) ; if ( AtlasPerfTracer . isPerfTraceEnabled ( PERF_LOG ) ) { perf = AtlasPerfTracer . getPerfTracer ( PERF_LOG , "EntityREST.deleteByUniqueAttribute(" + typeName + "," + attributes + ")" ) ; } AtlasEntityType entityType = ensureEntityType ( typeName ) ; return entitiesStore . deleteByUniqueAttributes ( entityType , attributes ) ; } finally { AtlasPerfTracer . log ( perf ) ; }
public class FSNamesystem {
    /**
     * Called from the RaidEncodingTaskEncodingMonitor to process encoding tasks.
     * Each call polls up to (raidEncodingTaskMultiplier * nodes) stripes from
     * {@code raidEncodingTasks}, computes the information needed to encode each
     * stripe, and queues encoding work on datanodes. Stripes that fail to
     * compute are pushed back onto {@code raidEncodingTasks} for the next round.
     */
    private void processRaidEncodingTaskAsync() {
        // Should not schedule raiding during safe mode
        if (isInSafeMode()) {
            return;
        }
        final int nodes = heartbeats.size();
        // Presize to the number of tasks we can drain this round.
        List<RaidBlockInfo> tasksToProcess = new ArrayList<RaidBlockInfo>(
                Math.min(raidEncodingTasks.size(), ReplicationConfigKeys.raidEncodingTaskMultiplier * nodes));
        // Drain in batches of 'nodes' tasks, taking the write lock per batch so
        // other namesystem operations can interleave between batches.
        for (int i = 0; i < ReplicationConfigKeys.raidEncodingTaskMultiplier; i++) {
            writeLock();
            try {
                NameNode.getNameNodeMetrics().numRaidEncodingTasks.set(raidEncodingTasks.size());
                raidEncodingTasks.pollNToList(nodes, tasksToProcess);
                if (raidEncodingTasks.isEmpty()) {
                    // Nothing left to drain; stop early.
                    break;
                }
            } finally {
                writeUnlock();
            }
        }
        // Process the drained tasks outside the lock; collect the failures.
        ArrayList<RaidBlockInfo> tasksToAdd = new ArrayList<RaidBlockInfo>();
        for (RaidBlockInfo rbi : tasksToProcess) {
            if (NameNode.stateChangeLog.isDebugEnabled()) {
                NameNode.stateChangeLog.debug("BLOCK* NameSystem.processRaidEncodingTaskAsync: " + rbi);
            }
            if (processRaidEncodingTask(rbi)) {
                tasksToAdd.add(rbi);
            }
        }
        // Re-queue the tasks that could not be processed this round.
        if (!tasksToAdd.isEmpty()) {
            writeLock();
            try {
                for (RaidBlockInfo rbi : tasksToAdd) {
                    raidEncodingTasks.add(rbi);
                }
            } finally {
                writeUnlock();
            }
        }
    }
}
public class SAXParser { /** * Parse the content of the given { @ link java . io . InputStream } * instance as XML using the specified * { @ link org . xml . sax . helpers . DefaultHandler } . * @ param is InputStream containing the content to be parsed . * @ param dh The SAX DefaultHandler to use . * @ param systemId The systemId which is needed for resolving relative URIs . * @ throws IllegalArgumentException If the given InputStream is null . * @ throws IOException If any IO errors occur . * @ throws SAXException If any SAX errors occur during processing . * @ see org . xml . sax . DocumentHandler version of this method instead . */ public void parse ( InputStream is , DefaultHandler dh , String systemId ) throws SAXException , IOException { } }
if ( is == null ) { throw new IllegalArgumentException ( "InputStream cannot be null" ) ; } InputSource input = new InputSource ( is ) ; input . setSystemId ( systemId ) ; this . parse ( input , dh ) ;
public class ExposeLinearLayoutManagerEx { /** * { @ inheritDoc } */ @ Override public int scrollVerticallyBy ( int dy , RecyclerView . Recycler recycler , RecyclerView . State state ) { } }
if ( getOrientation ( ) == HORIZONTAL ) { return 0 ; } return scrollInternalBy ( dy , recycler , state ) ;
public class PemObjectReader { /** * Gets the pem object . * @ param file * the file * @ return the pem object * @ throws IOException * Signals that an I / O exception has occurred . */ public static PemObject getPemObject ( final File file ) throws IOException { } }
PemObject pemObject ; try ( PemReader pemReader = new PemReader ( new InputStreamReader ( new FileInputStream ( file ) ) ) ) { pemObject = pemReader . readPemObject ( ) ; } return pemObject ;
public class VoiceClient {
    /**
     * Modifies an ongoing call identified by {@code uuid}. The modification can
     * be one of: hangup, mute, unmute, earmuff, unearmuff.
     *
     * @param uuid the UUID of the call, obtained from {@link #createCall(Call)}
     *        or {@link CallEvent#getUuid()}
     * @param action the modification to apply
     * @return a ModifyCallResponse representing the Nexmo Voice API response
     * @throws IOException if a network error occurred contacting the Nexmo Voice API
     * @throws NexmoClientException if there was a problem with the Nexmo request
     *         or response objects
     */
    public ModifyCallResponse modifyCall(String uuid, ModifyCallAction action) throws IOException, NexmoClientException {
        // Delegate to the CallModifier-based overload.
        return this.modifyCall(new CallModifier(uuid, action));
    }
}
public class TypeTransformationParser { /** * A raw type expression must be of the form rawTypeOf ( TTLExp ) */ private boolean validRawTypeOfTypeExpression ( Node expr ) { } }
// The expression must have two children . The rawTypeOf keyword and the // parameter if ( ! checkParameterCount ( expr , Keywords . RAWTYPEOF ) ) { return false ; } // The parameter must be a valid type expression if ( ! validTypeTransformationExpression ( getCallArgument ( expr , 0 ) ) ) { warnInvalidInside ( Keywords . RAWTYPEOF . name , expr ) ; return false ; } return true ;
public class OtpErlangLong { /** * Get this number as a short . * @ return the value of this number , as a short . * @ exception OtpErlangRangeException * if the value is too large to be represented as a short . */ public short shortValue ( ) throws OtpErlangRangeException { } }
final long l = longValue ( ) ; final short i = ( short ) l ; if ( i != l ) { throw new OtpErlangRangeException ( "Value too large for short: " + val ) ; } return i ;
public class LogPrior { /** * why isn ' t this functionality in enum ? */ private static LogPriorType intToType ( int intPrior ) { } }
LogPriorType [ ] values = LogPriorType . values ( ) ; for ( LogPriorType val : values ) { if ( val . ordinal ( ) == intPrior ) { return val ; } } throw new IllegalArgumentException ( intPrior + " is not a legal LogPrior." ) ;
public class EarGenericBuilder { /** * Generates deployment for given application . * @ param type Module type to generate * @ param basename Base name of module to generate * @ param doFiltering should do basic filtering * @ return EnterpriseArchive containing given module and all dependencies */ public static EnterpriseArchive getModuleDeployment ( ModuleType type , String basename , boolean doFiltering ) { } }
String name = basename + "." + type . getExtension ( ) ; String testJarName = basename + "-tests.jar" ; // LOG . debug ( " Creating Arquillian deployment for [ " + name + " ] " ) ; try { EarDescriptorBuilder descriptorBuilder = new EarDescriptorBuilder ( basename ) ; MavenResolverSystem maven = Maven . resolver ( ) ; // ConfigurableMavenResolverSystem maven = Maven . configureResolver ( ) . workOffline ( ) . withMavenCentralRepo ( false ) ; EnterpriseArchive ear = ShrinkWrap . create ( EnterpriseArchive . class , basename + "-full.ear" ) ; PomEquippedResolveStage resolveStage = maven . loadPomFromFile ( "pom.xml" ) ; // przejrzenie dependency oznaczonych jako provided w celu znalezienia EJB ' ków MavenResolvedArtifact [ ] provided = resolveStage . importRuntimeDependencies ( ) . importDependencies ( ScopeType . PROVIDED ) . resolve ( ) . using ( new AcceptScopesStrategy ( ScopeType . PROVIDED ) ) . asResolvedArtifact ( ) ; for ( MavenResolvedArtifact mra : provided ) { // System . out . println ( " Checking provided : " + mra . getCoordinate ( ) . toCanonicalForm ( ) ) ; if ( isArtifactEjb ( mra . getCoordinate ( ) ) ) { ear . addAsModule ( mra . as ( JavaArchive . class ) ) ; // dodajemy jako moduł descriptorBuilder . addEjb ( mra . asFile ( ) . getName ( ) ) ; // przeglądamy dependency EJB ' ka w celu pobrania także zależności z EJB ' ka for ( MavenArtifactInfo mai : mra . getDependencies ( ) ) { // LOG . debug ( " Resolved : " + mai . getCoordinate ( ) . getGroupId ( ) + " : " + mai . getCoordinate ( ) . getArtifactId ( ) ) ; // pomijamy wzajemne zależności do innych EJB ' ków if ( ! isArtifactEjb ( mai . getCoordinate ( ) ) ) { for ( MavenResolvedArtifact reqMra : provided ) { if ( reqMra . getCoordinate ( ) . toCanonicalForm ( ) . equals ( mai . getCoordinate ( ) . toCanonicalForm ( ) ) ) { // dodanie zależności do lib ' ów ear . addAsLibrary ( reqMra . asFile ( ) ) ; break ; } } } } } } MavenResolvedArtifact [ ] deps = resolveStage . 
importRuntimeAndTestDependencies ( ) . resolve ( ) . withTransitivity ( ) . asResolvedArtifact ( ) ; for ( MavenResolvedArtifact mra : deps ) { MavenCoordinate mc = mra . getCoordinate ( ) ; PackagingType packaging = mc . getPackaging ( ) ; if ( doFiltering && isFiltered ( mc ) ) { continue ; } LOG . log ( Level . FINEST , "Adding: {0}" , mc . toCanonicalForm ( ) ) ; if ( isArtifactEjb ( mc ) ) { // dependency w postaci ejb ' ków ear . addAsModule ( mra . as ( JavaArchive . class ) ) ; descriptorBuilder . addEjb ( mra . asFile ( ) . getName ( ) ) ; } else if ( packaging . equals ( PackagingType . WAR ) ) { // dependency w postaci war ' ów ear . addAsModule ( mra . as ( WebArchive . class ) ) ; descriptorBuilder . addWeb ( mra . asFile ( ) . getName ( ) ) ; } else { // resztę dodajemy jako lib ear . addAsLibrary ( mra . asFile ( ) ) ; } } // utworzenie głównego archiwum // Archive < ? > module = ShrinkWrap . create ( MavenImporter . class , name ) // . loadPomFromFile ( " pom . xml " ) // . as ( type . getType ( ) ) ; Archive < ? > module = ShrinkWrap . create ( ExplodedImporter . class , name ) . importDirectory ( type . getExplodedDir ( basename ) ) . as ( type . getType ( ) ) ; JavaArchive testJar = ShrinkWrap . create ( ExplodedImporter . class , testJarName ) . importDirectory ( "target/test-classes" ) . as ( JavaArchive . class ) ; module = module . merge ( testJar , type . getMergePoint ( ) ) ; // mergeReplace ( ear , module , testJar ) ; module . add ( new StringAsset ( RUN_AT_ARQUILLIAN_CONTENT ) , RUN_AT_ARQUILLIAN_PATH ) ; LOG . log ( Level . FINE , module . toString ( true ) ) ; addMainModule ( ear , type , module , descriptorBuilder ) ; // Workaround for arquillian bug if ( ! descriptorBuilder . containsWar ( ) ) { String testModuleName = ModuleType . WAR . generateModuleName ( ) + ".war" ; ear . addAsModule ( ShrinkWrap . create ( WebArchive . class , testModuleName ) ) ; descriptorBuilder . addWeb ( testModuleName ) ; } ear . 
setApplicationXML ( new StringAsset ( descriptorBuilder . render ( ) ) ) ; ear . addManifest ( ) ; LOG . log ( Level . INFO , "Created deployment [{0}]" , ear . getName ( ) ) ; // System . out . println ( ear . toString ( true ) ) ; // System . out . println ( descriptorBuilder . render ( ) ) ; return ear ; } catch ( IllegalArgumentException ex ) { throw new IllegalStateException ( "Error in creating deployment [" + ex + "]" , ex ) ; } catch ( InvalidConfigurationFileException ex ) { throw new IllegalStateException ( "Error in creating deployment [" + ex + "]" , ex ) ; } catch ( ArchiveImportException ex ) { throw new IllegalStateException ( "Error in creating deployment [" + ex + "]" , ex ) ; }
public class Code { /** * Returns the local for the parameter at index { @ code index } and of type * { @ code type } . */ public < T > Local < T > getParameter ( int index , TypeId < T > type ) { } }
if ( thisLocal != null ) { index ++ ; // adjust for the hidden ' this ' parameter } return coerce ( parameters . get ( index ) , type ) ;
public class OWLObjectPropertyAssertionAxiomImpl_CustomFieldSerializer {
    /**
     * Deserializes the content of the object from the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamReader}.
     *
     * @param streamReader the stream to read the object's content from
     * @param instance the object instance to deserialize into
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *         if the deserialization operation is not successful
     */
    @Override
    public void deserializeInstance(SerializationStreamReader streamReader, OWLObjectPropertyAssertionAxiomImpl instance) throws SerializationException {
        // Delegate to the static helper that does the field-by-field read.
        deserialize(streamReader, instance);
    }
}
public class CachingJobCatalog {
    /**
     * {@inheritDoc}
     * <p>
     * Tries the cache first; on any runtime failure (including a cache miss
     * surfacing as an exception) falls back to the underlying catalog.
     */
    @Override
    public JobSpec getJobSpec(URI uri) throws JobSpecNotFoundException {
        try {
            return _cache.getJobSpec(uri);
        } catch (RuntimeException e) {
            // Deliberate broad catch: any cache problem degrades to a direct lookup.
            return _fallback.getJobSpec(uri);
        }
    }
}
public class Ortc { /** * Saves the authentication token channels permissions in the ORTC server . * < pre > * HashMap & lt ; String , LinkedList & lt ; ChannelPermissions & gt ; & gt ; permissions = new HashMap & lt ; String , LinkedList & lt ; ChannelPermissions & gt ; & gt ; ( ) ; * LinkedList & lt ; ChannelPermissions & gt ; channelPermissions = new LinkedList & lt ; ChannelPermissions & gt ; ( ) ; * channelPermissions . add ( ChannelPermissions . Write ) ; * channelPermissions . add ( ChannelPermissions . Presence ) ; * permissions . put ( & quot ; channel & quot ; , channelPermissions ) ; * if ( ! Ortc . saveAuthentication ( & quot ; http : / / ortc - developers . realtime . co / server / 2.1 / & quot ; , * true , & quot ; SessionId & quot ; , false , & quot ; APPKEY & quot ; , 1800 , & quot ; PVTKEY & quot ; , permissions ) ) { * throw new Exception ( & quot ; Was not possible to authenticate & quot ; ) ; * < / pre > * @ param url * Ortc Server Url * @ param isCluster * Indicates whether the ORTC server is in a cluster . * @ param authenticationToken * Authentication Token which is generated by the application * server , for instance a unique session ID . * @ param authenticationTokenIsPrivate * Indicates whether the authentication token is private ( true ) * or not ( false ) * @ param applicationKey * Application Key that was provided to you together with the * ORTC service purchasing . * @ param timeToLive * The authentication token time to live , in other words , the * allowed activity time ( in seconds ) . * @ param privateKey * The private key provided to you together with the ORTC service * purchasing . * @ param permissions * HashMap & lt ; String , LinkedList & lt ; String , ChannelPermissions & gt ; * & gt ; permissions The channels and their permissions ( w : * write / read or r : read or p : presence , case sensitive ) . * @ param onCompleted * The callback that is executed after the save authentication is completed * @ throws ibt . ortc . api . 
OrtcAuthenticationNotAuthorizedException * @ throws InvalidBalancerServerException */ public static void saveAuthentication ( String url , boolean isCluster , final String authenticationToken , final boolean authenticationTokenIsPrivate , final String applicationKey , final int timeToLive , final String privateKey , final HashMap < String , LinkedList < ChannelPermissions > > permissions , final OnRestWebserviceResponse onCompleted ) throws IOException , InvalidBalancerServerException , OrtcAuthenticationNotAuthorizedException { } }
String connectionUrl = url ; if ( isCluster ) { Balancer . getServerFromBalancerAsync ( url , applicationKey , new OnRestWebserviceResponse ( ) { @ Override public void run ( Exception error , String response ) { if ( error != null ) { onCompleted . run ( error , null ) ; } else { saveAuthenticationAsync ( response , authenticationToken , authenticationTokenIsPrivate , applicationKey , timeToLive , privateKey , permissions , onCompleted ) ; } } } ) ; } else { saveAuthenticationAsync ( connectionUrl , authenticationToken , authenticationTokenIsPrivate , applicationKey , timeToLive , privateKey , permissions , onCompleted ) ; }
public class HttpUtils {
    /**
     * Deletes a cookie by overwriting it with an empty value and zero max-age.
     *
     * @param name the cookie name
     * @param req HTTP request
     * @param res HTTP response
     */
    public static void removeStateParam(String name, HttpServletRequest req, HttpServletResponse res) {
        // Empty value + maxAge 0 instructs the browser to discard the cookie.
        setRawCookie(name, "", req, res, false, 0);
    }
}
public class AgentManager { /** * On AgentsEvent create a new Agent . * @ param event generated by Asterisk server . */ void handleAgentsEvent ( AgentsEvent event ) { } }
AsteriskAgentImpl agent = new AsteriskAgentImpl ( server , event . getName ( ) , "Agent/" + event . getAgent ( ) , AgentState . valueOf ( event . getStatus ( ) ) ) ; logger . info ( "Adding agent " + agent . getName ( ) + "(" + agent . getAgentId ( ) + ")" ) ; addAgent ( agent ) ;
public class Address {
    /**
     * Builds a request that retrieves the list of addresses.
     *
     * @return a GET request against the "addresses" resource
     * @throws IOException if building the request URI fails
     */
    public static RetrieveRequest retrieve() throws IOException {
        String uri = uri("addresses");
        return new RetrieveRequest(Method.GET, uri);
    }
}
public class MissingDataHandler { /** * This function fills NA with given values . Default using a scalar value fillNA ( value , null , 0 , null , null , null , * false ) * @ param value a scalar value to fill all NAs * @ param method = ' ffill ' for forward fill or ' bfill ' for backward fill * @ param limit = maximum size gap for forward or backward fill * @ param function aggregate function to generate the filled value for a column * @ param columnsToValues = a map to provide different values to fill for different columns * @ param columns = only consider NA filling on the given columns , set to null for all columns of the DDF * @ return a DDF with NAs filled */ @ Override public DDF fillNA ( String value , FillMethod method , long limit , AggregateFunction function , Map < String , String > columnsToValues , List < String > columns ) throws DDFException { } }
DDF newddf = null ; if ( columns == null ) { columns = this . getDDF ( ) . getColumnNames ( ) ; } if ( method == null ) { String sqlCmd = fillNAWithValueSQL ( value , function , columnsToValues , columns ) ; mLog . info ( "FillNA sql command: " + sqlCmd ) ; newddf = this . getManager ( ) . sql2ddf ( String . format ( sqlCmd , this . getDDF ( ) . getTableName ( ) ) , false ) ; } else { // interpolation methods ' ffill ' or ' bfill ' // TODO : } newddf . getMetaDataHandler ( ) . copyFactor ( this . getDDF ( ) ) ; return newddf ;
public class ListLabelingJobsForWorkteamResult { /** * An array of < code > LabelingJobSummary < / code > objects , each describing a labeling job . * @ param labelingJobSummaryList * An array of < code > LabelingJobSummary < / code > objects , each describing a labeling job . */ public void setLabelingJobSummaryList ( java . util . Collection < LabelingJobForWorkteamSummary > labelingJobSummaryList ) { } }
if ( labelingJobSummaryList == null ) { this . labelingJobSummaryList = null ; return ; } this . labelingJobSummaryList = new java . util . ArrayList < LabelingJobForWorkteamSummary > ( labelingJobSummaryList ) ;
public class Content { /** * Retrieve content map instance . Delegates { @ link # getValue ( Object , String ) } to obtain the requested value . If value * is null warn the event an return empty map . If value is map return it , otherwise throws context exception . * @ param scope scope object , * @ param propertyPath object property path . * @ return map instance , possible empty . * @ throws TemplateException if requested value is undefined or not a { @ link Types # isMap ( Object ) map } . */ Map < ? , ? > getMap ( Object scope , String propertyPath ) throws TemplateException { } }
Object map = getValue ( scope , propertyPath ) ; if ( map == null ) { warn ( scope . getClass ( ) , propertyPath ) ; return Collections . EMPTY_MAP ; } if ( ! Types . isMap ( map ) ) { throw new TemplateException ( "Invalid type. Expected map but got |%s|." , map . getClass ( ) ) ; } return ( Map < ? , ? > ) map ;
public class CmsLuceneDocument {
    /**
     * Adds a field to this document, indexing it by name and appending it to
     * the underlying Lucene document.<p>
     *
     * @param f the field to add
     */
    private void add(Field f) {
        // Keep the name->field index and the Lucene document in sync.
        m_fields.put(f.name(), f);
        m_doc.add(f);
    }
}
public class ParsingExpression { /** * parse a day number ( 0 = sunday ) and put it in the private _ day attribute * @ return boolean */ private boolean _readChar ( ) { } }
int endIndex = pos + 1 ; if ( endIndex <= len ) { _char = text . substring ( pos , endIndex ) ; pos ++ ; return true ; } pos = len ; return false ;
public class SubmissionUtils { /** * Re - creates the document submitted by the client from * the submission document . * @ param submissionDoc Submission document from the submission database * @ return Document submitted by user for update */ static public JSONObject getSubmittedDocumentFromSubmission ( JSONObject submissionDoc ) throws Exception { } }
JSONObject submissionInfo = submissionDoc . getJSONObject ( "nunaliit_submission" ) ; JSONObject doc = submissionInfo . getJSONObject ( "submitted_doc" ) ; JSONObject reserved = submissionInfo . optJSONObject ( "submitted_reserved" ) ; return recreateDocumentFromDocAndReserved ( doc , reserved ) ;
public class LessLockingUniversalPgSQLQueueFactory { /** * { @ inheritDoc } */ @ Override protected LessLockingUniversalPgSQLQueue createQueueInstance ( final QueueSpec spec ) { } }
LessLockingUniversalPgSQLQueue queue = new LessLockingUniversalPgSQLQueue ( ) ; queue . setFifo ( defaultFifo ) ; Boolean fifo = spec . getField ( SPEC_FIELD_FIFO , Boolean . class ) ; if ( fifo != null ) { queue . setFifo ( fifo . booleanValue ( ) ) ; } return queue ;
public class Targeting {
    /**
     * Gets the technologyTargeting value for this Targeting.
     *
     * @return technologyTargeting the browsing technologies targeted by the
     *         {@link LineItem}; this attribute is optional and may be null
     */
    public com.google.api.ads.admanager.axis.v201808.TechnologyTargeting getTechnologyTargeting() {
        return technologyTargeting;
    }
}
public class WhiteboxImpl { /** * Set the value of a field using reflection . This method will traverse the * super class hierarchy until the first field assignable to the * < tt > value < / tt > type is found . The < tt > value < / tt > ( or * < tt > additionaValues < / tt > if present ) will then be assigned to this field . * @ param object the object to modify * @ param value the new value of the field * @ param additionalValues Additional values to set on the object */ public static void setInternalState ( Object object , Object value , Object ... additionalValues ) { } }
setField ( object , value , findFieldInHierarchy ( object , new AssignableFromFieldTypeMatcherStrategy ( getType ( value ) ) ) ) ; if ( additionalValues != null && additionalValues . length > 0 ) { for ( Object additionalValue : additionalValues ) { setField ( object , additionalValue , findFieldInHierarchy ( object , new AssignableFromFieldTypeMatcherStrategy ( getType ( additionalValue ) ) ) ) ; } }
public class FSM2MealyParserAlternating {
    /**
     * Creates the actual Mealy machine transitions, starting from the first
     * declared state and visiting every reachable state.
     *
     * @throws FSMParseException when the Mealy machine is partial (some state
     *         is never reached)
     */
    @Override
    protected void checkTransitions(StreamTokenizer streamTokenizer) throws FSMParseException {
        // States need not be declared explicitly in FSM sources; if none were,
        // derive them from the transitions we found.
        if (getStates().isEmpty()) {
            getStates().addAll(transitionsFSM.keySet());
        }
        // Track which states have not been visited yet.
        final Set<Integer> pending = new HashSet<>(getStates());
        final Integer initialState = getStates().iterator().next();
        // Build the actual FSM transitions from the initial state.
        makeTransitions(initialState, null, pending, 0, output != null ? new WordBuilder<>() : null, streamTokenizer);
        // Any state left unvisited means the FSM is partial.
        if (!pending.isEmpty()) {
            throw new FSMParseException(String.format(PARTIAL_FSM, pending, initialState), streamTokenizer);
        }
    }
}
public class Ssh2Context { /** * Set the preferred mac for the Client - > Server stream . * @ param name * @ throws SshException */ public void setPreferredMacCS ( String name ) throws SshException { } }
if ( name == null ) return ; if ( macCS . contains ( name ) ) { prefMacCS = name ; setMacPreferredPositionCS ( name , 0 ) ; } else { throw new SshException ( name + " is not supported" , SshException . UNSUPPORTED_ALGORITHM ) ; }
public class DeepWaterModelV3 {
    /**
     * Version- and schema-specific filling into the implementation: builds a
     * fresh DeepWaterModel from this schema's parameters.
     */
    @Override
    public DeepWaterModel createImpl() {
        DeepWaterParameters parms = parameters.createImpl();
        // Fresh destination key, empty output; remaining ctor args are unused here.
        return new DeepWaterModel(Key.<DeepWaterModel>make() /* dest */, parms, new DeepWaterModelOutput(null), null, null, 0);
    }
}
public class MutationState { /** * Add one or more { @ link DocumentFragment DocumentFragments } to this { @ link MutationState } . * @ param documentFragments the fragments where the tokens are extracted from . * @ return the modified { @ link MutationState } . */ public MutationState add ( DocumentFragment ... documentFragments ) { } }
if ( documentFragments == null || documentFragments . length == 0 ) { throw new IllegalArgumentException ( "At least one DocumentFragment must be provided." ) ; } for ( DocumentFragment d : documentFragments ) { addToken ( d . mutationToken ( ) ) ; } return this ;
public class CmsUpdateDBDropOldIndexes { /** * Gets the indexes for a table . < p > * @ param dbCon the db connection interface * @ param tablename the table to get the indexes from * @ return a list of indexes * @ throws SQLException if somehting goes wrong */ private List < String > getIndexes ( CmsSetupDb dbCon , String tablename ) throws SQLException { } }
List < String > indexes = new ArrayList < String > ( ) ; String tableIndex = readQuery ( QUERY_SHOW_INDEX ) ; Map < String , String > replacer = new HashMap < String , String > ( ) ; replacer . put ( REPLACEMENT_TABLENAME , tablename ) ; CmsSetupDBWrapper db = null ; try { db = dbCon . executeSqlStatement ( tableIndex , replacer ) ; while ( db . getResultSet ( ) . next ( ) ) { String index = db . getResultSet ( ) . getString ( FIELD_INDEX ) ; if ( ! indexes . contains ( index ) ) { indexes . add ( index ) ; } } } finally { if ( db != null ) { db . close ( ) ; } } return indexes ;
public class OjbTagsHandler {
    /**
     * Processes all classes (flattens the hierarchy such that every class has
     * declarations for all fields, references, and collections that it will
     * have in the descriptor) and applies modifications (removes ignored
     * features, changes declarations).
     *
     * @return An empty string
     * @throws XDocletException if a model constraint is violated
     * @doc.tag type="content"
     */
    public String prepare() throws XDocletException {
        String checkLevel = (String) getDocletContext().getConfigParam(CONFIG_PARAM_CHECKS);
        ArrayList queue = new ArrayList();
        ClassDescriptorDef classDef, baseDef;
        XClass original;
        boolean isFinished;
        // determine inheritance relationships
        for (Iterator it = _model.getClasses(); it.hasNext();) {
            classDef = (ClassDescriptorDef) it.next();
            original = classDef.getOriginalClass();
            isFinished = false;
            queue.clear();
            // Walk the ancestry breadth-first: 'original' holds the type whose
            // supertypes still need to be queued; null means "drain the queue".
            while (!isFinished) {
                if (original == null) {
                    isFinished = true;
                    // Try to resolve every queued supertype to a known descriptor.
                    for (Iterator baseIt = queue.iterator(); baseIt.hasNext();) {
                        original = (XClass) baseIt.next();
                        baseDef = _model.getClass(original.getQualifiedName());
                        baseIt.remove();
                        if (baseDef != null) {
                            classDef.addDirectBaseType(baseDef);
                        } else {
                            // Unknown in the model: keep walking its supertypes.
                            isFinished = false;
                            break;
                        }
                    }
                }
                if (!isFinished) {
                    // Queue the interfaces and superclass of the current type.
                    if (original.getInterfaces() != null) {
                        for (Iterator baseIt = original.getInterfaces().iterator(); baseIt.hasNext();) {
                            queue.add(baseIt.next());
                        }
                    }
                    if (original.getSuperclass() != null) {
                        queue.add(original.getSuperclass());
                    }
                    original = null;
                }
            }
        }
        try {
            _model.process();
            _model.checkConstraints(checkLevel);
        } catch (ConstraintException ex) {
            throw new XDocletException(ex.getMessage());
        }
        return "";
    }
}
public class image { /** * Get the screen height . * @ param context * @ return the screen height */ @ SuppressWarnings ( "deprecation" ) @ SuppressLint ( "NewApi" ) public static int getScreenHeight ( Activity context ) { } }
Display display = context . getWindowManager ( ) . getDefaultDisplay ( ) ; if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . HONEYCOMB_MR2 ) { Point size = new Point ( ) ; display . getSize ( size ) ; return size . y ; } return display . getHeight ( ) ;
public class Token { /** * Returns whether a specified character is the token symbol . * @ param c a character * @ return true , if a specified character is one of the token symbols */ public static boolean isTokenSymbol ( char c ) { } }
return ( c == BEAN_SYMBOL || c == TEMPLATE_SYMBOL || c == PARAMETER_SYMBOL || c == ATTRIBUTE_SYMBOL || c == PROPERTY_SYMBOL ) ;
public class ClassDescriptorDef {
    /**
     * Checks the constraints on this class.
     *
     * @param checkLevel The amount of checks to perform
     * @exception ConstraintException If a constraint has been violated
     */
    public void checkConstraints(String checkLevel) throws ConstraintException {
        // One constraint checker per feature kind; created up front, applied in order.
        FieldDescriptorConstraints fieldChecks = new FieldDescriptorConstraints();
        ReferenceDescriptorConstraints referenceChecks = new ReferenceDescriptorConstraints();
        CollectionDescriptorConstraints collectionChecks = new CollectionDescriptorConstraints();

        // Validate every field declaration.
        Iterator fieldIt = getFields();
        while (fieldIt.hasNext()) {
            fieldChecks.check((FieldDescriptorDef) fieldIt.next(), checkLevel);
        }
        // Validate every reference declaration.
        Iterator referenceIt = getReferences();
        while (referenceIt.hasNext()) {
            referenceChecks.check((ReferenceDescriptorDef) referenceIt.next(), checkLevel);
        }
        // Validate every collection declaration.
        Iterator collectionIt = getCollections();
        while (collectionIt.hasNext()) {
            collectionChecks.check((CollectionDescriptorDef) collectionIt.next(), checkLevel);
        }
        // Finally validate the class-level constraints themselves.
        new ClassDescriptorConstraints().check(this, checkLevel);
    }
}
public class CertificateChainUtil { /** * Take a chain and return a ( Read - only ) chain with the root certificate as the first entry * @ param chain * a chain with the certificates in order ( either leading away from root or leading towards root ) * @ return a read - only chain leading away from the root certificate * @ throws IllegalArgumentException * if the chain is null or empty */ public static List < X509Certificate > toRootFirst ( List < X509Certificate > chain ) { } }
if ( chain == null || chain . isEmpty ( ) ) throw new IllegalArgumentException ( "Must provide a chain that is non-null and non-empty" ) ; final List < X509Certificate > out ; // Sort the list so the root certificate comes first if ( ! isSelfSigned ( chain . get ( 0 ) ) ) { // Copy the chain List so we can modify it out = new ArrayList < X509Certificate > ( chain ) ; Collections . reverse ( out ) ; // If , even when reversed , the chain doesn ' t have a root at the start then the chain ' s invalid if ( ! isSelfSigned ( out . get ( 0 ) ) ) { throw new IllegalArgumentException ( "Neither end of the certificate chain has a Root! " + chain ) ; } } else { out = chain ; } return Collections . unmodifiableList ( out ) ;
public class DataIO {
    /**
     * Print a CSTable to a PrintWriter
     *
     * @param table the table to print
     * @param out the writer to write to
     */
    public static void print(CSTable table, PrintWriter out) {
        // Table record line: TABLE marker plus the CSV-escaped table name.
        out.println(TABLE + "," + CSVParser.printLine(table.getName()));
        // Table-level metadata, one key/value pair per line.
        for (String key : table.getInfo().keySet()) {
            out.println(CSVParser.printLine(key, table.getInfo().get(key)));
        }
        // Nothing further to emit for a table without columns.
        if (table.getColumnCount() < 1) {
            out.flush();
            return;
        }
        // Column header row. NOTE: CSTable column access is 1-based throughout.
        out.print(HEADER);
        for (int i = 1; i <= table.getColumnCount(); i++) {
            out.print("," + table.getColumnName(i));
        }
        out.println();
        // Column metadata rows: the key set is taken from column 1 only --
        // assumes all columns share the same metadata keys (TODO confirm against CSTable contract).
        Map<String, String> m = table.getColumnInfo(1);
        for (String key : m.keySet()) {
            out.print(key);
            for (int i = 1; i <= table.getColumnCount(); i++) {
                out.print("," + table.getColumnInfo(i).get(key));
            }
            out.println();
        }
        // Data rows. Index 0 of each row array is skipped -- presumably a row
        // label/index matching the 1-based column convention (verify against CSTable.rows()).
        // Each data line starts with "," so values align with the header columns.
        for (String[] row : table.rows()) {
            for (int i = 1; i < row.length; i++) {
                out.print("," + row[i]);
            }
            out.println();
        }
        out.println();
        out.flush();
    }
}
public class DataStream { /** * Creates a new { @ link ConnectedStreams } by connecting * { @ link DataStream } outputs of ( possible ) different types with each other . * The DataStreams connected using this operator can be used with * CoFunctions to apply joint transformations . * @ param dataStream * The DataStream with which this stream will be connected . * @ return The { @ link ConnectedStreams } . */ public < R > ConnectedStreams < T , R > connect ( DataStream < R > dataStream ) { } }
return new ConnectedStreams < > ( environment , this , dataStream ) ;
public class UpdateDocumentationVersionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateDocumentationVersionRequest updateDocumentationVersionRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateDocumentationVersionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateDocumentationVersionRequest . getRestApiId ( ) , RESTAPIID_BINDING ) ; protocolMarshaller . marshall ( updateDocumentationVersionRequest . getDocumentationVersion ( ) , DOCUMENTATIONVERSION_BINDING ) ; protocolMarshaller . marshall ( updateDocumentationVersionRequest . getPatchOperations ( ) , PATCHOPERATIONS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class OffsetDateTime { /** * Returns a copy of this { @ code OffsetDateTime } with the specified number of nanoseconds subtracted . * This instance is immutable and unaffected by this method call . * @ param nanos the nanos to subtract , may be negative * @ return an { @ code OffsetDateTime } based on this date - time with the nanoseconds subtracted , not null * @ throws DateTimeException if the result exceeds the supported date range */ public OffsetDateTime minusNanos ( long nanos ) { } }
return ( nanos == Long . MIN_VALUE ? plusNanos ( Long . MAX_VALUE ) . plusNanos ( 1 ) : plusNanos ( - nanos ) ) ;
public class BaseStatefulWindowedBolt {
    /**
     * {@inheritDoc}
     */
    @Override
    public BaseStatefulWindowedBolt<T> withWindow(Count windowLength, Count slidingInterval) {
        // Configure the window on the superclass, then return `this` so the
        // fluent chain keeps the narrower BaseStatefulWindowedBolt<T> type
        // (the superclass return value is intentionally discarded).
        super.withWindow(windowLength, slidingInterval);
        return this;
    }
}
public class AbstractJaxRsWebEndpoint {
    /**
     * Configure common endpoint properties: disables the JAXB validation event handler and
     * copies all Liberty endpoint properties onto the CXF endpoint.
     *
     * @param libertyEndpointInfo the Liberty-side endpoint descriptor supplying properties
     * @param cxfEndpointInfo the CXF endpoint model that receives the properties
     */
    protected void configureEndpointInfoProperties(EndpointInfo libertyEndpointInfo, org.apache.cxf.service.model.EndpointInfo cxfEndpointInfo) {
        // Disable jaxb validation event handler, as IBM FastPath does not support this,
        // which will finally fallback to RI unmarshall.
        cxfEndpointInfo.setProperty(SET_JAXB_VALIDATION_EVENT_HANDLER, false);

        // NOTE(review): a large block of commented-out code (autoRewriteSoapAddressForAllServices
        // and WSDL_DESCRIPTION wiring) was removed here; recover it from version control if needed.

        // Propagate every Liberty-configured endpoint property onto the CXF endpoint.
        Map<String, String> endpointProperties = libertyEndpointInfo.getEndpointProperties();
        if (endpointProperties != null && !endpointProperties.isEmpty()) {
            for (Entry<String, String> entry : endpointProperties.entrySet()) {
                cxfEndpointInfo.setProperty(entry.getKey(), entry.getValue());
            }
        }
    }
}
public class XDMClientChildSbb {
    /**
     * (non-Javadoc)
     *
     * @see org.restcomm.slee.enabler.xdmc.XDMClientControl#putIfMatch(java.net.URI,
     *      java.lang.String, java.lang.String, byte[], Credentials)
     */
    public void putIfMatch(URI uri, String eTag, String mimetype, byte[] content, Credentials credentials) throws IOException {
        // Delegates to the six-argument overload, passing null for the extra
        // parameter (its semantics are not visible here -- see the overload's javadoc).
        putIfMatch(uri, eTag, mimetype, content, null, credentials);
    }
}
public class InternalUtilities {
    /**
     * getJavaTargetVersionFromPom, Returns a string with the java "target" version, as it was
     * specified in the pom file at compile time.
     *
     * <p>Fixes over the previous version: the resource stream is now closed
     * (try-with-resources), a missing "project.properties" resource no longer relies on an
     * NPE for control flow, and a missing "targetJavaVersion" key returns "" instead of the
     * literal string "null" (previously produced by {@code "" + null}).</p>
     *
     * @return the configured target Java version, or "" if it cannot be determined
     */
    public static String getJavaTargetVersionFromPom() {
        try (InputStream stream = ClassLoader.getSystemClassLoader().getResourceAsStream("project.properties")) {
            if (stream == null) {
                // Resource not on the classpath; nothing to report.
                return "";
            }
            Properties properties = new Properties();
            properties.load(stream);
            String version = properties.getProperty("targetJavaVersion");
            return (version == null) ? "" : version;
        } catch (Exception ex) {
            // Best-effort lookup: any failure degrades to "unknown".
            return "";
        }
    }
}
public class RectifyImageOps {
    /**
     * Creates a transform that applies rectification to unrectified distorted pixels and outputs
     * normalized pixel coordinates.
     *
     * @param param Intrinsic parameters.
     * @param rectify Transform for rectifying the image.
     * @param rectifyK Camera calibration matrix after rectification
     * @return Transform from unrectified to rectified normalized pixels
     */
    public static Point2Transform2_F32 transformPixelToRectNorm(CameraPinholeBrown param, FMatrixRMaj rectify, FMatrixRMaj rectifyK) {
        // Thin wrapper: selects the 32-bit float implementation.
        return ImplRectifyImageOps_F32.transformPixelToRectNorm(param, rectify, rectifyK);
    }
}
public class OutlookMessage {
    /**
     * Sets the name/value pair in the {@link #properties} map. Some properties are put into special attributes
     * (e.g., {@link #toEmail} when the property name is '0076').
     *
     * @param msgProp the MAPI property (hex class name + data) to record
     */
    public void setProperty(final OutlookMessageProperty msgProp) {
        final String name = msgProp.getClazz();
        final Object value = msgProp.getData();
        // Ignore incomplete properties outright.
        if (name == null || value == null) {
            return;
        }

        // Most fields expect a String representation of the value
        final String stringValue = convertValueToString(value);

        // Parse the hex MAPI class; unknown/non-hex names fall through to -1
        // (no case matches) but are still stored in the properties map below.
        int mapiClass = -1;
        try {
            mapiClass = Integer.parseInt(name, 16);
        } catch (final NumberFormatException e) {
            LOGGER.trace("Unexpected type: {}", name, e);
        }

        // Dispatch well-known MAPI classes into dedicated fields.
        switch (mapiClass) {
            case 0x1a: // MESSAGE CLASS
                setMessageClass(stringValue);
                break;
            case 0x1035:
                setMessageId(stringValue);
                break;
            case 0x37: // SUBJECT
            case 0xe1d: // NORMALIZED SUBJECT
                setSubject(stringValue);
                break;
            case 0xc1f: // SENDER EMAIL ADDRESS
            case 0x65: // SENT REPRESENTING EMAIL ADDRESS
            case 0x3ffa: // LAST MODIFIER NAME
            case 0x800d:
            case 0x8008:
                setFromEmail(stringValue);
                break;
            case 0x42: // SENT REPRESENTING NAME
                setFromName(stringValue);
                break;
            case 0x76: // RECEIVED BY EMAIL ADDRESS
                setToEmail(stringValue, true);
                break;
            case 0x8000:
                setToEmail(stringValue);
                break;
            case 0x3001: // DISPLAY NAME
                setToName(stringValue);
                break;
            case 0xe04: // DISPLAY TO
                setDisplayTo(stringValue);
                break;
            case 0xe03: // DISPLAY CC
                setDisplayCc(stringValue);
                break;
            case 0xe02: // DISPLAY BCC
                setDisplayBcc(stringValue);
                break;
            case 0x1013: // HTML
                setBodyHTML(stringValue);
                break;
            case 0x1000: // BODY
                setBodyText(stringValue);
                break;
            case 0x1009: // RTF COMPRESSED
                setBodyRTF(value);
                break;
            case 0x7d: // TRANSPORT MESSAGE HEADERS
                setHeaders(stringValue);
                break;
            case 0x3007: // CREATION TIME
                setCreationDate(stringValue);
                break;
            case 0x3008: // LAST MODIFICATION TIME
                setLastModificationDate(stringValue);
                break;
            case 0x39: // CLIENT SUBMIT TIME
                setClientSubmitTime(stringValue);
                break;
            case 0x8005: // S/MIME details
                setSmime(stringValue);
                break;
        }

        // save all properties (incl. those identified above)
        properties.put(mapiClass, value);

        // Re-evaluate the primary "to" recipient after every property update.
        checkToRecipient();

        // other possible values (some are duplicates)
        // 0044: recv name
        // 004d: author
        // 0050: reply
        // 005a: sender
        // 0065: sent email
        // 0076: received email
        // 0078: repr. email
        // 0c1a: sender name
        // 0e04: to
        // 0e1d: subject normalized
        // 1046: sender email
        // 3003: email address
        // 1008 rtf sync
    }
}
public class ConfigRESTHandler {
    /**
     * Validates configuration of a resource and returns the result as a JSON object.
     *
     * @param uid unique identifier.
     * @param config configuration of a resource instance.
     * @param processed configurations that have already been processed -- to prevent stack overflow from circular dependencies in errant config.
     * @return JSON representing the configuration. Null if not an external configuration element.
     * @throws IOException
     */
    private JSONObject getConfigInfo(String uid, Dictionary<String, Object> config, Set<String> processed) throws IOException {
        String configDisplayId = (String) config.get("config.displayId");
        boolean isFactoryPid;
        String configElementName;
        if (isFactoryPid = configDisplayId.endsWith("]")) { // factory pid
            // Element name is the segment between the last '/' and the trailing "[...]" suffix.
            int end = configDisplayId.lastIndexOf('[');
            int begin = configDisplayId.lastIndexOf('/', end) + 1;
            configElementName = configDisplayId.substring(begin, end);
        } else
            configElementName = configDisplayId; // singleton pid

        // Dotted element names are internal (except "properties.*") -- not external config.
        if (configElementName.indexOf('.') >= 0 && !configElementName.startsWith("properties."))
            return null;

        // Get pid to use with config service
        String servicePid = isFactoryPid ? (String) config.get("service.factoryPid") : (String) config.get("service.pid");
        String extendsSourcePid = isFactoryPid ? (String) config.get("ibm.extends.source.factoryPid") : (String) config.get("ibm.extends.source.pid");
        String metaTypeElementName = configHelper.getMetaTypeElementName(extendsSourcePid == null ? servicePid : extendsSourcePid);

        // if the element's name is internal, no config should be added for that element
        if (metaTypeElementName != null && metaTypeElementName.equalsIgnoreCase("internal"))
            return null;

        JSONObject json = new OrderedJSONObject();
        json.put("configElementName", configElementName);
        if (isFactoryPid)
            json.put("uid", uid);
        // `processed` doubles as cycle detection: a repeat display id means circular config.
        if (!processed.add(configDisplayId)) {
            json.put("error", "Circular dependency in configuration.");
            return json;
        }

        boolean registryEntryExists = configHelper.registryEntryExists(servicePid);

        // Mapping of flat config prefix (like "properties.0.") to map of flattened config prop names/values
        SortedMap<String, SortedMap<String, Object>> flattened = new TreeMap<String, SortedMap<String, Object>>();
        // Mapping of pid to list of flat config prefixes which are of that pid type
        SortedMap<String, SortedSet<String>> flattenedPids = new TreeMap<String, SortedSet<String>>();

        // TODO app defined resources
        SortedSet<String> keys = new TreeSet<String>();
        for (java.util.Enumeration<String> en = config.keys(); en.hasMoreElements();) {
            String key = en.nextElement();
            // don't display items starting with config. or service. or ibm.extends (added by config service)
            if (key.startsWith("config.") || key.startsWith("service.") || key.startsWith("ibm.extends")) {
                continue;
            }
            String metaTypeName = configHelper.getMetaTypeAttributeName(extendsSourcePid == null ? servicePid : extendsSourcePid, key);
            if ("id".equals(key) && "library".equals(configElementName)) {
                // Work around the <library> element marking its id attribute as internal when its
                // id is actually a configurable external.
                keys.add(key);
            } else if ((metaTypeName != null && !metaTypeName.equalsIgnoreCase("internal")) || !registryEntryExists) {
                // add attributes with a name that is not internal or any attributes if there is an error in the config
                keys.add(key);
            } else {
                int prefixEnd = -1;
                StringBuilder prefix = new StringBuilder();
                String suffix = key;
                // flat config prefixes should match [<anything>.<numbers>.]+
                // For example: child.0.grandchild.0.value -> prefix = "child.0.grandchild.0." suffix = "value"
                while ((prefixEnd = nthIndexOf(suffix, ".", 2) + 1) > 0 && suffix.length() >= prefixEnd) {
                    String possiblePrefix = suffix.substring(0, prefixEnd);
                    if (!possiblePrefix.matches(".*\\.\\d+\\."))
                        break;
                    prefix.append(possiblePrefix);
                    suffix = suffix.substring(prefixEnd);
                }
                if (prefix.length() > 0) { // It is probably a flattened config attribute
                    if ("config.referenceType".equals(suffix)) {
                        // Remember which pid this flattened prefix belongs to.
                        String flattenedPid = (String) config.get(key);
                        SortedSet<String> f = flattenedPids.get(flattenedPid);
                        if (f == null)
                            flattenedPids.put(flattenedPid, f = new TreeSet<String>());
                        f.add(prefix.toString());
                    } else { // It is probably a flattened config attribute.
                        SortedMap<String, Object> f = flattened.get(prefix.toString());
                        if (f == null)
                            flattened.put(prefix.toString(), f = new TreeMap<String, Object>());
                        f.put(suffix, config.get(key));
                    }
                } else {
                    // Add MetaTypes with null name. This includes child first config and invalid config.
                    if (metaTypeName == null) {
                        keys.add(key);
                    }
                }
            }
        }

        // Look for child-first nested config elements.
        // This style of config has been discontinued but is still in use by some existing config elements,
        // which need to be handled specially here.
        if ("resourceAdapter".equals(configElementName)) {
            String childFirstFilter = "(config.parentPID=" + config.get("service.pid") + ')';
            Configuration[] childFirstConfigs;
            try {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    Tr.debug(this, tc, "child first pid filter", childFirstFilter);
                childFirstConfigs = configAdmin.listConfigurations(childFirstFilter);
            } catch (InvalidSyntaxException x) {
                // Filter is built from our own data; a syntax error is a programming bug.
                throw new RuntimeException(x);
            }
            if (childFirstConfigs != null)
                for (Configuration c : childFirstConfigs) {
                    Dictionary<String, Object> props = c.getProperties();
                    String childConfigDisplayId = (String) props.get("config.displayId");
                    int start = configDisplayId.length() + 1;
                    String childElementName = childConfigDisplayId.substring(start, childConfigDisplayId.indexOf('[', start));
                    keys.add(childElementName);
                    // Record the child pid under the child element name so it is rendered below.
                    config.put(childElementName, Collections.singleton(props.get("service.pid")));
                }
        }

        // These properties intentionally placed first
        if (keys.remove("id")) {
            String id = (String) config.get("id");
            if (!isGenerated(id))
                json.put("id", id);
        }
        if (keys.remove("jndiName"))
            json.put("jndiName", config.get("jndiName"));

        if (!registryEntryExists) {
            // registry entry doesn't exist - config service can't find the specified pid
            json.put("error", "Check that the spelling is correct and that the right features are enabled for this configuration.");
        }

        for (String key : keys)
            json.put(key, getJSONValue(config.get(key), processed));

        // Render each group of flattened config entries as a JSON array keyed by its element name.
        for (Map.Entry<String, SortedSet<String>> entry : flattenedPids.entrySet()) {
            String pid = entry.getKey();
            boolean registryEntryExistsForFlattenedConfig = configHelper.registryEntryExists(pid);
            JSONArray list = new JSONArray();
            String prefix = null;
            for (String flatConfigPrefix : entry.getValue()) {
                JSONObject j = new OrderedJSONObject();
                SortedMap<String, Object> flattenedConfigProps = flattened.get(prefix = flatConfigPrefix);
                if (flattenedConfigProps != null)
                    for (Map.Entry<String, Object> prop : flattenedConfigProps.entrySet()) {
                        String key = prop.getKey();
                        String metaTypeName = configHelper.getMetaTypeAttributeName(pid, key);
                        if (metaTypeName == null // add unknown attributes added by the user
                            || !metaTypeName.equalsIgnoreCase("internal") // add externalized attributes
                            || !registryEntryExistsForFlattenedConfig) { // or all attributes if there is an error in the config
                            j.put(key, getJSONValue(prop.getValue(), processed));
                        }
                    }
                list.add(j);
            }
            // TODO would be better to get the flattened config element name from config internals rather than hardcoding/approximating it
            String name = (String) config.get(prefix + ".resourceAdapterConfig.id");
            if (name == null) {
                String baseAlias = prefix.replaceAll("\\.\\d+\\.", "");
                name = configHelper.aliasFor(pid, baseAlias);
            }
            json.put(name, list);
        }

        // API for this configuration element instance
        if (servicePid != null) {
            ServiceReference<?>[] refs;
            String filter = FilterUtils.createPropertyFilter("com.ibm.wsspi.rest.handler.config.pid", servicePid);
            try {
                refs = context.getBundleContext().getServiceReferences((String) null, filter);
            } catch (InvalidSyntaxException x) {
                // Best effort: no API links if the filter lookup fails.
                refs = null;
            }
            if (refs != null) {
                SortedSet<String> apiRoots = new TreeSet<String>();
                for (ServiceReference<?> ref : refs) {
                    String root = (String) ref.getProperty(RESTHandler.PROPERTY_REST_HANDLER_ROOT);
                    if (root != null)
                        apiRoots.add(root);
                }
                JSONArray api = new JSONArray();
                for (String root : apiRoots) {
                    // Build /ibm/api/<root>/<element>[/<uid>] with exactly one '/' between segments.
                    StringBuilder r = new StringBuilder("/ibm/api");
                    if (root.charAt(0) != '/')
                        r.append('/');
                    r.append(root);
                    if (root.charAt(root.length() - 1) != '/')
                        r.append('/');
                    r.append(configElementName);
                    if (isFactoryPid)
                        r.append('/').append(URLEncoder.encode(uid, "UTF-8"));
                    api.add(r.toString());
                }
                if (!api.isEmpty())
                    json.put("api", api);
            }
        }
        return json;
    }
}
public class ResultSetMetadataMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ResultSetMetadata resultSetMetadata , ProtocolMarshaller protocolMarshaller ) { } }
if ( resultSetMetadata == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( resultSetMetadata . getColumnInfo ( ) , COLUMNINFO_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class P2sVpnServerConfigurationsInner {
    /**
     * Creates a P2SVpnServerConfiguration to associate with a VirtualWan if it doesn't exist else updates the existing P2SVpnServerConfiguration.
     *
     * @param resourceGroupName The resource group name of the VirtualWan.
     * @param virtualWanName The name of the VirtualWan.
     * @param p2SVpnServerConfigurationName The name of the P2SVpnServerConfiguration.
     * @param p2SVpnServerConfigurationParameters Parameters supplied to create or Update a P2SVpnServerConfiguration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the P2SVpnServerConfigurationInner object
     */
    public Observable<ServiceResponse<P2SVpnServerConfigurationInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String virtualWanName, String p2SVpnServerConfigurationName, P2SVpnServerConfigurationInner p2SVpnServerConfigurationParameters) {
        // Fail fast on every missing required argument before issuing the call;
        // the check order (and thus which exception surfaces first) is part of the generated contract.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (virtualWanName == null) {
            throw new IllegalArgumentException("Parameter virtualWanName is required and cannot be null.");
        }
        if (p2SVpnServerConfigurationName == null) {
            throw new IllegalArgumentException("Parameter p2SVpnServerConfigurationName is required and cannot be null.");
        }
        if (p2SVpnServerConfigurationParameters == null) {
            throw new IllegalArgumentException("Parameter p2SVpnServerConfigurationParameters is required and cannot be null.");
        }
        Validator.validate(p2SVpnServerConfigurationParameters);
        // API version pinned to the service release this client was generated against.
        final String apiVersion = "2018-08-01";
        return service.beginCreateOrUpdate(this.client.subscriptionId(), resourceGroupName, virtualWanName, p2SVpnServerConfigurationName, apiVersion, p2SVpnServerConfigurationParameters, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<P2SVpnServerConfigurationInner>>>() {
                @Override
                public Observable<ServiceResponse<P2SVpnServerConfigurationInner>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into the typed service response.
                        ServiceResponse<P2SVpnServerConfigurationInner> clientResponse = beginCreateOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Propagate deserialization/validation failures through the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class HttpRequestExecutor { /** * Creates and returns a GET { @ link HttpRequest } instance * for the given < tt > path < / tt > over the internal hostname . * @ param path Path of the request to build . * @ return Built GET request . * @ throws IOException If any error occurs while creating the GET request . */ public HttpRequest getRequest ( final String path ) throws IOException { } }
final GenericUrl url = getURL ( path ) ; return requestFactory . buildGetRequest ( url ) ;
public class BigRational { /** * Calculates the division ( / ) of this rational number and the specified argument . * < p > This is functionally identical to * < code > this . divide ( BigRational . valueOf ( value ) ) < / code > * but slightly faster . < / p > * < p > The result has no loss of precision . < / p > * @ param value the { @ link BigInteger } to divide ( 0 is not allowed ) * @ return the resulting rational number * @ throws ArithmeticException if the argument is 0 ( division by zero ) */ public BigRational divide ( BigInteger value ) { } }
if ( value . equals ( BigInteger . ONE ) ) { return this ; } return divide ( new BigDecimal ( value ) ) ;
public class VocabConstructor {
    /**
     * This method transfers existing WordVectors model into current one
     *
     * @param wordVectors the model whose vocabulary is merged in
     * @param fetchLabels whether labels should be fetched as well
     * @return the merged vocabulary cache
     */
    @SuppressWarnings("unchecked") // method is safe, since all calls inside are using generic SequenceElement methods
    public VocabCache<T> buildMergedVocabulary(@NonNull WordVectors wordVectors, boolean fetchLabels) {
        // Delegates to the VocabCache-based overload using the source model's vocabulary.
        return buildMergedVocabulary((VocabCache<T>) wordVectors.vocab(), fetchLabels);
    }
}
public class DTMDefaultBase {
    /**
     * Get the parent for the given node identity.
     *
     * @param identity The node identity.
     * @return The parent identity, or DTM.NULL.
     */
    protected int _parent(int identity) {
        // Fast path: the node has already been processed into the parent table.
        if (identity < m_size)
            return m_parent.elementAt(identity);

        // Check to see if the information requested has been processed, and,
        // if not, advance the iterator until the information has been processed.
        while (true) {
            boolean isMore = nextNode();
            if (identity >= m_size && !isMore)
                return NULL; // document exhausted without reaching this identity
            else if (identity < m_size)
                return m_parent.elementAt(identity);
        }
    }
}
public class StackdriverExporter {
    /**
     * Registers the {@code StackdriverExporter}.
     *
     * @param spanExporter the instance of the {@code SpanExporter} where this service is registered.
     * @param handler the handler to register with the exporter.
     */
    @VisibleForTesting
    static void register(SpanExporter spanExporter, Handler handler) {
        // Delegates registration to the trace exporter implementation.
        StackdriverTraceExporter.register(spanExporter, handler);
    }
}
public class DocumentRootImpl {
    /**
     * Sets the BPSim data value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setBPSimData(BPSimDataType newBPSimData) {
        // EMF-generated setter: stores the value in the mixed feature map
        // under the BP_SIM_DATA structural feature.
        ((FeatureMap.Internal) getMixed()).set(BpsimPackage.Literals.DOCUMENT_ROOT__BP_SIM_DATA, newBPSimData);
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the EEnum for IfcProjectOrderRecordTypeEnum.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcProjectOrderRecordTypeEnum() {
        // Lazily resolve the EEnum from the registered package on first access.
        // Index 874 is the generated classifier position -- do not edit by hand.
        if (ifcProjectOrderRecordTypeEnumEEnum == null) {
            ifcProjectOrderRecordTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(874);
        }
        return ifcProjectOrderRecordTypeEnumEEnum;
    }
}
public class MurmurHash3 {
    /**
     * Gets a long from the given byte array starting at the given byte array index and continuing for
     * remainder (rem) bytes. The bytes are extracted in little-endian order. There is no limit
     * checking.
     *
     * @param bArr The given input byte array.
     * @param index Zero-based index from the start of the byte array.
     * @param rem Remainder bytes. An integer in the range [1,8].
     * @return the value assembled in little-endian byte order
     */
    private static long getLong(final byte[] bArr, final int index, final int rem) {
        // Assemble least-significant byte first. Each masked byte occupies its own
        // disjoint 8-bit slot, so OR-ing is equivalent to the XOR in the reference code.
        long value = 0L;
        for (int i = 0; i < rem; i++) {
            value |= (bArr[index + i] & 0xFFL) << (i << 3);
        }
        return value;
    }
}