signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Validators { /** * Determine if the input parameter is a Email . if yes , the check passes . * @ param msg error message after verification failed * @ return Validation */ public static Validation < String > isEmail ( String msg ) { } }
return notEmpty ( ) . and ( SimpleValidation . from ( PatternKit :: isEmail , msg ) ) ;
public class DeleteMountTargetRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param deleteMountTargetRequest the request to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     */
    public void marshall(DeleteMountTargetRequest deleteMountTargetRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteMountTargetRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the mount-target id field is bound for this request.
            protocolMarshaller.marshall(deleteMountTargetRequest.getMountTargetId(), MOUNTTARGETID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PhoneNumberUtil {

    /**
     * Formats a phone number in E.123 national format while tracking the cursor position.
     *
     * @param pphoneNumberData phone number to format, paired with the current cursor position
     * @return formatted phone number as String with the adjusted cursor position,
     *         or {@code null} when the input is {@code null}
     */
    public final ValueWithPos<String> formatE123NationalWithPos(final ValueWithPos<PhoneNumberData> pphoneNumberData) {
        if (pphoneNumberData == null) {
            return null;
        }
        int cursor = pphoneNumberData.getPos();
        final StringBuilder resultNumber = new StringBuilder();
        if (isPhoneNumberNotEmpty(pphoneNumberData.getValue())) {
            // Find the country data matching the number's country code.
            PhoneCountryData phoneCountryData = null;
            for (final PhoneCountryCodeData country : CreatePhoneCountryConstantsClass.create().countryCodeData()) {
                if (StringUtils.equals(country.getCountryCode(), pphoneNumberData.getValue().getCountryCode())) {
                    phoneCountryData = country.getPhoneCountryData();
                    break;
                }
            }
            // Unknown country: fall back to the international format.
            if (phoneCountryData == null) {
                return this.formatE123InternationalWithPos(pphoneNumberData);
            }
            // National format drops the country code and prefixes the trunk code,
            // so shift the cursor by the length difference.
            if (cursor > 0) {
                cursor -= StringUtils.length(pphoneNumberData.getValue().getCountryCode());
                cursor += StringUtils.length(phoneCountryData.getTrunkCode());
            }
            cursor++; // account for the opening parenthesis
            resultNumber.append('(').append(phoneCountryData.getTrunkCode());
            if (StringUtils.isNotBlank(pphoneNumberData.getValue().getAreaCode())) {
                resultNumber.append(pphoneNumberData.getValue().getAreaCode());
            }
            // Cursor past the area code: skip over the ") " separator.
            if (resultNumber.length() <= cursor) {
                cursor += 2;
            }
            resultNumber.append(") ");
            resultNumber.append(pphoneNumberData.getValue().getLineNumber());
            if (StringUtils.isNotBlank(pphoneNumberData.getValue().getExtension())) {
                // Cursor in the extension part: skip over the separating space.
                if (resultNumber.length() <= cursor) {
                    cursor++;
                }
                resultNumber.append(' ');
                resultNumber.append(pphoneNumberData.getValue().getExtension());
            }
        }
        return new ValueWithPos<>(StringUtils.trimToNull(resultNumber.toString()), cursor);
    }
}
public class MonotonicLaxImmutableMapBuilder {

    /**
     * See {@link ImmutableMap.Builder#orderEntriesByValue(Comparator)}.
     *
     * @param valueComparator comparator applied to entry values when the map is built
     * @return this builder, for chaining
     */
    @Override
    public LaxImmutableMapBuilder<K, V> orderEntriesByValue(Comparator<? super V> valueComparator) {
        // Delegate to the wrapped builder; this wrapper only preserves the fluent return type.
        innerBuilder.orderEntriesByValue(valueComparator);
        return this;
    }
}
public class VecUtils {

    /**
     * Create a new {@link Vec} of categorical values from a string {@link Vec}.
     * FIXME: implement in more efficient way with Brandon's primitives for BufferedString manipulation
     *
     * @param vec a string {@link Vec}
     * @return a categorical {@link Vec}
     */
    public static Vec stringToCategorical(Vec vec) {
        // First pass: collect the distinct string values; these become the categorical domain.
        final String[] vecDomain = new CollectStringVecDomain().domain(vec);
        MRTask task = new MRTask() {
            // Per-node map from domain string to its categorical index; rebuilt on each node.
            transient private java.util.HashMap<String, Integer> lookupTable;

            @Override
            protected void setupLocal() {
                lookupTable = new java.util.HashMap<>(vecDomain.length);
                for (int i = 0; i < vecDomain.length; i++) {
                    // FIXME: boxing
                    lookupTable.put(vecDomain[i], i);
                }
            }

            @Override
            public void map(Chunk c, NewChunk nc) {
                BufferedString bs = new BufferedString();
                for (int row = 0; row < c.len(); row++) {
                    if (c.isNA(row)) {
                        nc.addNA();
                    } else {
                        c.atStr(bs, row);
                        String strRepresentation = bs.toString();
                        // Values containing the Unicode replacement char are looked up
                        // via their sanitized form instead.
                        if (strRepresentation.contains("\uFFFD")) {
                            nc.addNum(lookupTable.get(bs.toSanitizedString()), 0);
                        } else {
                            nc.addNum(lookupTable.get(strRepresentation), 0);
                        }
                    }
                }
            }
        };
        // Invoke tasks - one input vector, one ouput vector
        task.doAll(new byte[]{Vec.T_CAT}, vec);
        // Return result
        return task.outputFrame(null, null, new String[][]{vecDomain}).vec(0);
    }
}
public class RTMPHandshake { /** * RTMPE type 8 uses XTEA on the regular signature http : / / en . wikipedia . org / wiki / XTEA * @ param array array to get signature * @ param offset offset to start from * @ param keyid ID of XTEA key */ public final static void getXteaSignature ( byte [ ] array , int offset , int keyid ) { } }
int num_rounds = 32 ; int v0 , v1 , sum = 0 , delta = 0x9E3779B9 ; int [ ] k = XTEA_KEYS [ keyid ] ; v0 = ByteBuffer . wrap ( array , offset , 4 ) . getInt ( ) ; v1 = ByteBuffer . wrap ( array , offset + 4 , 4 ) . getInt ( ) ; for ( int i = 0 ; i < num_rounds ; i ++ ) { v0 += ( ( ( v1 << 4 ) ^ ( v1 >> 5 ) ) + v1 ) ^ ( sum + k [ sum & 3 ] ) ; sum += delta ; v1 += ( ( ( v0 << 4 ) ^ ( v0 >> 5 ) ) + v0 ) ^ ( sum + k [ ( sum >> 11 ) & 3 ] ) ; } ByteBuffer tmp = ByteBuffer . allocate ( 4 ) ; tmp . putInt ( v0 ) ; tmp . flip ( ) ; System . arraycopy ( tmp . array ( ) , 0 , array , offset , 4 ) ; tmp . clear ( ) ; tmp . putInt ( v1 ) ; tmp . flip ( ) ; System . arraycopy ( tmp . array ( ) , 0 , array , offset + 4 , 4 ) ;
public class StringExpression {

    /**
     * Create a {@code this.toUpperCase()} expression.
     * <p>Get the upper case form</p>
     *
     * @return this.toUpperCase()
     * @see java.lang.String#toUpperCase()
     */
    public StringExpression upper() {
        // Lazily build and cache the UPPER operation wrapping this expression.
        if (upper == null) {
            upper = Expressions.stringOperation(Ops.UPPER, mixin);
        }
        return upper;
    }
}
public class ManagedClustersInner {

    /**
     * Gets upgrade profile for a managed cluster.
     * Gets the details of the upgrade profile for a managed cluster with a specified resource group and name.
     *
     * @param resourceGroupName The name of the resource group.
     * @param resourceName The name of the managed cluster resource.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ManagedClusterUpgradeProfileInner> getUpgradeProfileAsync(String resourceGroupName, String resourceName, final ServiceCallback<ManagedClusterUpgradeProfileInner> serviceCallback) {
        // Adapt the observable-based overload to the callback-style ServiceFuture API.
        return ServiceFuture.fromResponse(getUpgradeProfileWithServiceResponseAsync(resourceGroupName, resourceName), serviceCallback);
    }
}
public class RequestParameterBuilder { /** * Adds a request parameter to the URL . This is a convenient method for primitive , plain data types . Parameter ' s value will not be converted to JSON * notation when adding . It will be only encoded according to the acquired encoding . Note : null values will not be added . * @ param name name of the request parameter * @ param value value of the request parameter * @ return RequestParameterBuilder updated this instance which can be reused * @ throws UnsupportedEncodingException DOCUMENT _ ME */ public RequestParameterBuilder param ( String name , Object value ) throws UnsupportedEncodingException { } }
String encodedValue = encode ( value ) ; if ( encodedValue == null ) { return this ; } if ( added || originalUrl . contains ( "?" ) ) { buffer . append ( "&" ) ; } else { buffer . append ( "?" ) ; } buffer . append ( name ) ; buffer . append ( "=" ) ; buffer . append ( encodedValue ) ; // set a flag that at least one request parameter was added added = true ; return this ;
public class Assert {

    /**
     * Asserts that the condition is {@literal false}.
     * The assertion holds if and only if the value is equal to {@literal false}.
     *
     * @param condition {@link Boolean} value being evaluated.
     * @param message {@link Supplier} containing the message used in the {@link IllegalArgumentException}
     *        thrown if the assertion fails.
     * @throws java.lang.IllegalArgumentException if the value is not {@literal false}.
     * @see java.lang.Boolean#FALSE
     */
    public static void isFalse(Boolean condition, Supplier<String> message) {
        // Anything other than Boolean.FALSE (including null) fails the assertion;
        // the message is only materialized on failure.
        if (isNotFalse(condition)) {
            throw new IllegalArgumentException(message.get());
        }
    }
}
public class PolynomialApproximation { /** * Returns the function value of the polynomial approximation * at the specified k . * @ param k the value for which the polynomial approximation should be returned * @ return the function value of the polynomial approximation * at the specified k */ public double getValueAt ( int k ) { } }
double result = 0. ; double log_k = FastMath . log ( k ) , acc = 1. ; for ( int p = 0 ; p < b . length ; p ++ ) { result += b [ p ] * acc ; acc *= log_k ; } return result ;
public class DefaultGroovyMethods {

    /**
     * Convenience method to dynamically create a new instance of this class.
     * Calls the default constructor.
     *
     * @param c a class
     * @return a new instance of this class
     * @since 1.0
     */
    @SuppressWarnings("unchecked")
    public static <T> T newInstance(Class<T> c) {
        // A null argument list selects the no-arg constructor.
        return (T) InvokerHelper.invokeConstructorOf(c, null);
    }
}
public class CmisConnector {

    /**
     * Defines the node type for the repository info.
     *
     * @param typeManager JCR node type manager.
     * @throws RepositoryException if the type template cannot be created or registered
     */
    @SuppressWarnings("unchecked")
    private void registerRepositoryInfoType(NodeTypeManager typeManager) throws RepositoryException {
        // create node type template
        NodeTypeTemplate type = typeManager.createNodeTypeTemplate();
        // convert CMIS type's attributes to node type template we have just created
        type.setName("cmis:repository");
        type.setAbstract(false);
        type.setMixin(false);
        type.setOrderableChildNodes(true);
        type.setQueryable(true);
        type.setDeclaredSuperTypeNames(new String[]{JcrConstants.NT_FOLDER});
        // The three optional repository attributes share the same definition shape,
        // so they are created by a single helper instead of three copy-pasted stanzas.
        addOptionalProperty(typeManager, type, "cmis:vendorName");
        addOptionalProperty(typeManager, type, "cmis:productName");
        addOptionalProperty(typeManager, type, "cmis:productVersion");
        // register type
        NodeTypeDefinition[] nodeDefs = new NodeTypeDefinition[]{type};
        typeManager.registerNodeTypes(nodeDefs, true);
    }

    /**
     * Adds a non-mandatory, non-auto-created property definition with the given name
     * to the supplied node type template.
     */
    @SuppressWarnings("unchecked")
    private void addOptionalProperty(NodeTypeManager typeManager, NodeTypeTemplate type, String name)
            throws RepositoryException {
        PropertyDefinitionTemplate property = typeManager.createPropertyDefinitionTemplate();
        property.setAutoCreated(false);
        property.setName(name);
        property.setMandatory(false);
        type.getPropertyDefinitionTemplates().add(property);
    }
}
public class Iterate {

    /**
     * Add all elements from the source Iterable to the target collection, return the target collection.
     *
     * @param iterable source of elements
     * @param targetCollection collection receiving the elements (mutated in place)
     * @return the target collection, for chaining
     */
    public static <T, R extends Collection<T>> R addAllTo(Iterable<? extends T> iterable, R targetCollection) {
        Iterate.addAllIterable(iterable, targetCollection);
        return targetCollection;
    }
}
public class InstanceIdentityProvider { /** * Shortcut to { @ link KeyPair # getPublic ( ) } . * @ return the public key . { @ code null } if { @ link # getKeyPair ( ) } is { @ code null } . */ @ SuppressWarnings ( "unchecked" ) @ CheckForNull protected PUB getPublicKey ( ) { } }
KeyPair keyPair = getKeyPair ( ) ; return keyPair == null ? null : ( PUB ) keyPair . getPublic ( ) ;
public class MetadataService {

    /**
     * Updates a {@link ProjectRole} for the {@link Token} of the specified {@code appId}.
     *
     * @param author author of the change
     * @param projectName project whose token role is updated
     * @param token token whose role is replaced
     * @param role the new role
     * @return a future completing with the revision of the metadata commit
     */
    public CompletableFuture<Revision> updateTokenRole(Author author, String projectName, Token token, ProjectRole role) {
        requireNonNull(author, "author");
        requireNonNull(projectName, "projectName");
        requireNonNull(token, "token");
        requireNonNull(role, "role");
        final TokenRegistration registration = new TokenRegistration(token.appId(), role, UserAndTimestamp.of(author));
        // JSON pointer to this token's entry inside the metadata document.
        final JsonPointer path = JsonPointer.compile("/tokens" + encodeSegment(registration.id()));
        // Replace the whole registration node rather than patching individual fields.
        final Change<JsonNode> change = Change.ofJsonPatch(METADATA_JSON,
                new ReplaceOperation(path, Jackson.valueToTree(registration)).toJsonNode());
        final String commitSummary = "Update the role of a token '" + token.appId() + "' as '" + role
                + "' for the project " + projectName;
        return metadataRepo.push(projectName, Project.REPO_DOGMA, author, commitSummary, change);
    }
}
public class EbeanQueryChannelService { /** * Return a ExpressionList specifying propertyName equals value . * @ param expressionList the ExpressionList to add contains expression * @ param propertyName the property name of entity bean . * @ param value equals value * @ param < T > the type of entity . * @ return a ExpressionList specifying propertyName equals value . */ public static < T > ExpressionList < T > eqIfNotNull ( ExpressionList < T > expressionList , String propertyName , Object value ) { } }
Assert . notNull ( expressionList , "expressionList must not null" ) ; Assert . hasText ( propertyName , "propertyName must not null" ) ; if ( value != null ) { return expressionList . eq ( propertyName , value ) ; } return expressionList ;
public class CodeableConceptDt {

    /**
     * Gets the value(s) for <b>coding</b> (), creating it if it does not exist.
     * Will not return <code>null</code>.
     *
     * <b>Definition:</b>
     * A reference to a code defined by a terminology system
     */
    public java.util.List<CodingDt> getCoding() {
        // Lazily create the backing list so callers can always mutate the returned list.
        if (myCoding == null) {
            myCoding = new java.util.ArrayList<CodingDt>();
        }
        return myCoding;
    }
}
public class PrivacyListManager {

    /**
     * Answer a privacy containing the list structure without {@link PrivacyItem}.
     *
     * @return a Privacy with the list names.
     * @throws XMPPErrorException
     * @throws NoResponseException
     * @throws NotConnectedException
     * @throws InterruptedException
     */
    private Privacy getPrivacyWithListNames() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
        // An empty privacy stanza asks the server for just the list names.
        return getRequest(new Privacy());
    }
}
public class AuditData {

    /**
     * <pre>
     * Detailed information about CreateVersion call.
     * </pre>
     * <code>.google.appengine.v1.CreateVersionMethod create_version = 2;</code>
     */
    public com.google.appengine.v1.CreateVersionMethod getCreateVersion() {
        // The "method" oneof holds a CreateVersionMethod only when its case is 2.
        if (methodCase_ == 2) {
            return (com.google.appengine.v1.CreateVersionMethod) method_;
        }
        // Otherwise return the canonical empty instance — never null.
        return com.google.appengine.v1.CreateVersionMethod.getDefaultInstance();
    }
}
public class GraphEntityMapper {

    /**
     * Fetches the first non-proxy node from the index hits.
     * Walks the hits recursively, returning the first node that lacks the
     * proxy marker property, or {@code null} when the hits are exhausted.
     */
    private Node getNonProxyNode(IndexHits<Node> nodesFound) {
        Node node = null;
        if (nodesFound.hasNext()) {
            node = nodesFound.next();
        } else {
            // No more hits: no non-proxy node exists.
            return null;
        }
        try {
            // Existence probe: getProperty throws when the property is absent.
            Object proxyNodeProperty = node.getProperty(PROXY_NODE_TYPE_KEY);
        } catch (NotFoundException e) {
            // Marker property missing -> this is a non-proxy node.
            return node;
        } catch (IllegalStateException e) {
            // NOTE(review): also treated as "non-proxy" — presumably covers deleted/detached
            // nodes; confirm against the Neo4j version in use.
            return node;
        }
        // Current node carries the proxy marker; recurse to examine the next hit.
        return getNonProxyNode(nodesFound);
    }
}
public class Debug { public void doGet ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { } }
Page page = new Page ( ) ; page . title ( getServletInfo ( ) ) ; page . attribute ( "text" , "#000000" ) ; page . attribute ( Page . BGCOLOR , "#FFFFFF" ) ; page . attribute ( "link" , "#606CC0" ) ; page . attribute ( "vlink" , "#606CC0" ) ; page . attribute ( "alink" , "#606CC0" ) ; Log l = LogFactory . getLog ( Debug . class ) ; if ( ! ( l instanceof LogImpl ) ) return ; LogImpl log = ( LogImpl ) l ; TableForm tf = new TableForm ( request . getRequestURI ( ) ) ; page . add ( tf ) ; tf . table ( ) . newRow ( ) . addCell ( new Block ( Block . Bold ) . add ( new Font ( 3 , true ) . add ( getServletInfo ( ) ) ) ) . cell ( ) . attribute ( "COLSPAN" , "2" ) ; tf . table ( ) . add ( Break . rule ) ; tf . addCheckbox ( "D" , "Debug On" , log . getDebug ( ) ) ; tf . addTextField ( "V" , "Verbosity Level" , 6 , "" + log . getVerbose ( ) ) ; tf . addTextField ( "P" , "Debug Patterns" , 40 , log . getDebugPatterns ( ) ) ; tf . addCheckbox ( "W" , "Suppress Warnings" , log . getSuppressWarnings ( ) ) ; LogSink [ ] sinks = log . getLogSinks ( ) ; for ( int s = 0 ; sinks != null && s < sinks . length ; s ++ ) { if ( sinks [ s ] == null ) continue ; tf . table ( ) . newRow ( ) . addCell ( Break . rule ) . cell ( ) . attribute ( "COLSPAN" , "2" ) ; tf . table ( ) . newRow ( ) . addCell ( "<B><font size=\"+1\">Log Sink " + s + ":</font></B" ) . right ( ) ; tf . table ( ) . addCell ( sinks [ s ] . getClass ( ) . getName ( ) ) . left ( ) ; tf . addCheckbox ( "LSS" + s , "Started" , sinks [ s ] . isStarted ( ) ) ; if ( sinks [ s ] instanceof OutputStreamLogSink ) { OutputStreamLogSink sink = ( OutputStreamLogSink ) sinks [ s ] ; tf . addCheckbox ( "LT" + s , "Tag" , sink . isLogTags ( ) ) ; tf . addCheckbox ( "LL" + s , "Label" , sink . isLogLabels ( ) ) ; tf . addCheckbox ( "Ls" + s , "Stack Size" , sink . isLogStackSize ( ) ) ; tf . addCheckbox ( "LS" + s , "Stack Trace" , sink . isLogStackTrace ( ) ) ; tf . addCheckbox ( "SS" + s , "Suppress Stacks" , sink . 
isSuppressStack ( ) ) ; tf . addCheckbox ( "SL" + s , "Single Line" , sink . isLogOneLine ( ) ) ; tf . addTextField ( "LF" + s , "Log File Name" , 40 , sink . getFilename ( ) ) ; } } tf . table ( ) . newRow ( ) . addCell ( Break . rule ) . cell ( ) . attribute ( "COLSPAN" , "2" ) ; tf . addTextField ( "LSC" , "Add LogSink Class" , 40 , "org.browsermob.proxy.jetty.log.OutputStreamLogSink" ) ; tf . addButtonArea ( ) ; tf . addButton ( "Action" , "Set Options" ) ; tf . addButton ( "Action" , "Add LogSink" ) ; tf . addButton ( "Action" , "Delete Stopped Sinks" ) ; tf . table ( ) . newRow ( ) . addCell ( Break . rule ) . cell ( ) . attribute ( "COLSPAN" , "2" ) ; response . setContentType ( "text/html" ) ; response . setHeader ( "Pragma" , "no-cache" ) ; response . setHeader ( "Cache-Control" , "no-cache,no-store" ) ; Writer writer = response . getWriter ( ) ; page . write ( writer ) ; writer . flush ( ) ;
public class ProcessGroovyMethods {

    /**
     * Overloads the left shift operator (&lt;&lt;) to provide an append mechanism
     * to pipe data to a Process.
     *
     * @param self a Process instance
     * @param value a value to append
     * @return a Writer
     * @throws java.io.IOException if an IOException occurs.
     * @since 1.0
     */
    public static Writer leftShift(Process self, Object value) throws IOException {
        // Delegate to the OutputStream overload, writing to the process's stdin.
        return IOGroovyMethods.leftShift(self.getOutputStream(), value);
    }
}
public class ServerCacheControl { /** * Parses the specified { @ code " cache - control " } header values into a { @ link ServerCacheControl } . * Note that any unknown directives will be ignored . * @ return the { @ link ServerCacheControl } decoded from the specified header values . */ public static ServerCacheControl parse ( Iterable < String > directives ) { } }
requireNonNull ( directives , "directives" ) ; final ServerCacheControlBuilder builder = new ServerCacheControlBuilder ( ) ; for ( String d : directives ) { parseCacheControl ( d , ( name , value ) -> { final BiConsumer < ServerCacheControlBuilder , String > action = DIRECTIVES . get ( name ) ; if ( action != null ) { action . accept ( builder , value ) ; } } ) ; } return builder . build ( ) ;
public class SecurityPhaseListener { /** * Get the default phases at which restrictions should be applied , by looking for a @ RestrictAtPhase on a matching * @ param viewId * @ return default phases for a view * @ ViewPattern , falling back on global defaults if none are found */ public PhaseIdType [ ] getDefaultPhases ( String viewId ) { } }
PhaseIdType [ ] defaultPhases = null ; RestrictAtPhase restrictAtPhase = viewConfigStore . getAnnotationData ( viewId , RestrictAtPhase . class ) ; if ( restrictAtPhase != null ) { defaultPhases = restrictAtPhase . value ( ) ; } if ( defaultPhases == null ) { defaultPhases = RestrictAtPhaseDefault . DEFAULT_PHASES ; } return defaultPhases ;
public class CmsPushButton { /** * Tells the button to use a minimal width . < p > * @ param useMinWidth < code > true < / code > to use a minimal width */ public void setUseMinWidth ( boolean useMinWidth ) { } }
if ( useMinWidth != m_useMinWidth ) { if ( useMinWidth ) { addStyleName ( I_CmsLayoutBundle . INSTANCE . buttonCss ( ) . cmsMinWidth ( ) ) ; } else { removeStyleName ( I_CmsLayoutBundle . INSTANCE . buttonCss ( ) . cmsMinWidth ( ) ) ; } m_useMinWidth = useMinWidth ; }
public class RestClientUtil {

    /**
     * Creates an index document; the index table name for the corresponding time period is
     * generated according to the date/time format configured in elasticsearch.xml.
     *
     * @param indexName base name of the index
     * @param bean the document payload
     * @return the response returned by the indexing call
     * @throws ElasticSearchException on indexing failure
     */
    public String addDateDocument(String indexName, Object bean) throws ElasticSearchException {
        // Delegate using the default document type held in the _doc field.
        return addDateDocument(indexName, _doc, bean);
    }
}
public class AbstractSimon { /** * Replaces one of the children for a new one ( unknown to concrete ) . Used only internally . * @ param simon original Simon ( unknown ) * @ param newSimon new Simon */ void replaceChild ( Simon simon , AbstractSimon newSimon ) { } }
children . remove ( simon ) ; if ( newSimon != null ) { children . add ( newSimon ) ; newSimon . setParent ( this ) ; }
public class EmbeddedChannel {

    /**
     * Writes one message to the inbound of this {@link Channel} and does not flush it. This
     * method is conceptually equivalent to {@link #write(Object, ChannelPromise)}.
     *
     * @see #writeOneOutbound(Object, ChannelPromise)
     */
    public ChannelFuture writeOneInbound(Object msg, ChannelPromise promise) {
        // Deliver the message only while the channel is open; checkOpen(true)
        // presumably records the closed state, which checkException then surfaces
        // through the returned promise.
        if (checkOpen(true)) {
            pipeline().fireChannelRead(msg);
        }
        return checkException(promise);
    }
}
public class AbstractEJBRuntime {

    /**
     * Bind the remote interfaces for all beans known to the container.
     * Intended to be used to restore EJB bindings when the ORB is restarted.
     */
    protected void bindAllRemoteInterfacesToContextRoot() {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "bindAllRemoteInterfacesToContextRoot");
        Map<EJBModuleMetaDataImpl, NameSpaceBinder<?>> binders = new HashMap<EJBModuleMetaDataImpl, NameSpaceBinder<?>>();
        // Start with the list of all HomeRecords. Not all of these beans may have
        // started, but this is the complete set of everything that would have been
        // bound into some naming context. The container may not be available yet
        // if the EJB feature is just starting, but then there is nothing to do.
        HomeOfHomes homeOfHomes = (ivContainer != null) ? ivContainer.getHomeOfHomes() : null;
        if (homeOfHomes != null) {
            List<HomeRecord> hrs = ivContainer.getHomeOfHomes().getAllHomeRecords();
            for (HomeRecord hr : hrs) {
                if (hr.bindToContextRoot()) {
                    BeanMetaData bmd = hr.getBeanMetaData();
                    if (isTraceOn && tc.isDebugEnabled())
                        Tr.debug(tc, "processing bindings for " + bmd.j2eeName);
                    // Component home interface, if any. NOTE(review): index -1 appears to
                    // denote the home interface — confirm in bindRemoteInterfaceToContextRoot.
                    if (bmd.homeInterfaceClassName != null) {
                        bindRemoteInterfaceToContextRoot(binders, hr, bmd.homeInterfaceClassName, -1);
                    }
                    // Business remote interfaces are bound with ascending indices.
                    if (bmd.ivBusinessRemoteInterfaceClassNames != null) {
                        int interfaceIndex = 0;
                        for (String remoteInterfaceName : bmd.ivBusinessRemoteInterfaceClassNames) {
                            bindRemoteInterfaceToContextRoot(binders, hr, remoteInterfaceName, interfaceIndex++);
                        }
                    }
                }
            }
        }
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "bindAllRemoteInterfacesToContextRoot");
    }
}
public class Ifc4PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcFlowInstrumentType() {
        // Lazily resolve the classifier (index 281) from the registered Ifc4 package and cache it.
        if (ifcFlowInstrumentTypeEClass == null) {
            ifcFlowInstrumentTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(281);
        }
        return ifcFlowInstrumentTypeEClass;
    }
}
public class NewRelicManager {

    /**
     * Called after setting configuration properties.
     * Builds the API clients from the cached API key; the clients are only
     * constructed when alerts are enabled.
     *
     * @param cache The provider cache
     * @throws IllegalArgumentException if the cache carries no API key
     */
    public void initialize(NewRelicCache cache) {
        String apiKey = cache.getApiKey();
        if (apiKey == null)
            throw new IllegalArgumentException("null API key");
        logger.info("Initialising the clients");
        // Mark uninitialized while (re)building clients.
        initialized = false;
        if (cache.isAlertsEnabled()) {
            apiClient = NewRelicApi.builder().apiKey(apiKey).build();
            infraApiClient = NewRelicInfraApi.builder().apiKey(apiKey).build();
            syntheticsApiClient = NewRelicSyntheticsApi.builder().apiKey(apiKey).build();
        }
        logger.info("Initialised the clients");
        // NOTE(review): initialized is set true even when alerts are disabled and no
        // clients were built — presumably intentional; confirm callers tolerate null clients.
        initialized = true;
    }
}
public class AiMaterial {

    /**
     * This method is used by JNI, do not call or modify.
     *
     * @param type the raw texture type value (converted to {@link AiTextureType})
     * @param number the number of textures of that type
     */
    @SuppressWarnings("unused")
    private void setTextureNumber(int type, int number) {
        m_numTextures.put(AiTextureType.fromRawValue(type), number);
    }
}
public class AbstractThemeWidget { @ Override public void onVisibleChange ( LayerShownEvent event ) { } }
if ( ! themeChange && getActiveViewConfig ( ) != null && ! event . isScaleChange ( ) ) { activateViewConfig ( null ) ; }
public class AreaSizesD { /** * Construct an area at the origin that has the same size as { @ code size } . * @ param size The area size * @ return An area at the origin */ public static AreaD area ( final AreaSizeD size ) { } }
Objects . requireNonNull ( size , "Size" ) ; return AreaD . of ( 0.0 , size . sizeX ( ) , 0.0 , size . sizeY ( ) ) ;
public class OpenALStreamPlayer {

    /**
     * Start this stream playing.
     *
     * @param loop True if the stream should loop
     * @throws IOException Indicates a failure to read from the stream
     */
    public void play(boolean loop) throws IOException {
        this.loop = loop;
        // (Re)open the underlying audio streams before touching the OpenAL source.
        initStreams();
        done = false;
        // Stop any in-progress playback and drop previously queued buffers
        // so the stream restarts from the beginning.
        AL10.alSourceStop(source);
        removeBuffers();
        startPlayback();
    }
}
public class GEARImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AfplibPackage.GEAR__DATA:
                setDATA((byte[]) newValue);
                return;
        }
        // Unknown features are handled by the generated superclass.
        super.eSet(featureID, newValue);
    }
}
public class LocalUniqueIDGeneratorFactory { /** * Return the UniqueIDGenerator instance for this specific generator - ID , cluster - ID combination . If one was * already created , that is returned . * @ param generatorId Generator ID to use ( 0 ≤ n ≤ 255 ) . * @ param clusterId Cluster ID to use ( 0 ≤ n ≤ 15 ) . * @ param mode Generator mode . * @ return A thread - safe UniqueIDGenerator instance . */ public synchronized static IDGenerator generatorFor ( int generatorId , int clusterId , Mode mode ) { } }
assertParameterWithinBounds ( "generatorId" , 0 , Blueprint . MAX_GENERATOR_ID , generatorId ) ; assertParameterWithinBounds ( "clusterId" , 0 , Blueprint . MAX_CLUSTER_ID , clusterId ) ; String generatorAndCluster = String . format ( "%d_%d" , generatorId , clusterId ) ; if ( ! instances . containsKey ( generatorAndCluster ) ) { GeneratorIdentityHolder identityHolder = LocalGeneratorIdentity . with ( clusterId , generatorId ) ; instances . putIfAbsent ( generatorAndCluster , new BaseUniqueIDGenerator ( identityHolder , mode ) ) ; } return instances . get ( generatorAndCluster ) ;
public class Streams {

    /**
     * Reverse a Stream.
     * <pre>
     * {@code
     * assertThat(Streams.reverse(Stream.of(1,2,3)).collect(CyclopsCollectors.toList()),
     *            equalTo(Arrays.asList(3,2,1)));
     * }
     * </pre>
     *
     * @param stream Stream to reverse
     * @return Reversed stream
     */
    public static <U> Stream<U> reverse(final Stream<U> stream) {
        // The collect happens inside flatMap so the source stream is only consumed
        // (and buffered) when the returned stream is actually evaluated.
        return ReactiveSeq.of(1).flatMap(i -> reversedStream(stream.collect(java.util.stream.Collectors.toList())));
    }
}
public class UserStorageMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param userStorage the object to marshall; must not be null
     * @param protocolMarshaller the target protocol marshaller
     */
    public void marshall(UserStorage userStorage, ProtocolMarshaller protocolMarshaller) {
        if (userStorage == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the capacity field is bound for this structure.
            protocolMarshaller.marshall(userStorage.getCapacity(), CAPACITY_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class TagImpl {

    /**
     * For a documentation comment with embedded {@code @link} tags, return the array of
     * TagImpls consisting of SeeTagImpl(s) and plain-text TagImpl(s). For example,
     * "This is an example of inline tags {@code {@link Doc commentlabel}}" yields a text
     * TagImpl followed by a SeeTagImpl referencing "Doc" with HTML link label "commentlabel".
     *
     * @return TagImpl[] array of tags with inline SeeTagImpls
     * @see ParamTagImpl
     * @see ThrowsTagImpl
     */
    public Tag[] inlineTags() {
        // Parse lazily and cache; subsequent calls reuse the parsed array.
        if (inlineTags == null) {
            inlineTags = Comment.getInlineTags(holder, text);
        }
        return inlineTags;
    }
}
public class EJSContainer {

    /**
     * Remove the bean with the given bean home name from this container. <p>
     *
     * @param bmd metadata of the bean being uninstalled
     * @param terminate {@code true} when the whole container is terminating, in which
     *        case stateful beans are not passivated (too expensive at shutdown)
     * @throws ContainerException declared for callers; failures below are wrapped in
     *         a ContainerEJBException instead
     */
    private void uninstallBean(BeanMetaData bmd, boolean terminate) throws ContainerException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); // d532639.2
        J2EEName beanHomeName = bmd.j2eeName;
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "uninstallBean : " + beanHomeName); // 89188
        // If the BeanMetaData is not in the internal store, then we only need to
        // remove the HomeRecord.
        if (!internalBeanMetaDataStore.containsKey(bmd.j2eeName)) {
            homeOfHomes.removeHome(bmd); // F743-26072
            if (isTraceOn && tc.isEntryEnabled()) // d402055
                Tr.exit(tc, "uninstallBean"); // d402055
            return;
        }
        // A failure has been reported in the code below that could only occur
        // if the two BMDs are out of synch, so the following FFDC is designed
        // to detect that state and collect possibly useful information. PM65053
        BeanMetaData ibmd = internalBeanMetaDataStore.get(bmd.j2eeName);
        if (bmd != ibmd) {
            Exception isex = new IllegalStateException("Mismatch in internal bean metadata : " + bmd.j2eeName);
            FFDCFilter.processException(isex, CLASS_NAME + ".uninstallBean", "1500", this, new Object[] { bmd, ibmd });
        }
        // 479980 : Simply moved the 'try' to here from down below. This move is necessary to properly handle the case
        // where a ContainerException is thrown in the next couple lines below. If this exception is going to be thrown,
        // we should still perform the code contained in the 'finally' block which corresponds to this 'try' block.
        EJSHome home = null;
        try // 202018
        {
            // First, remove the home from the HomeOfHomes to prevent any further
            // lookups of this home, and prevent any of the shutdown of the home
            // that is about to be perfomed from being undone. For example, the
            // wrappers should not be put back in the wrapper cache after being
            // removed below. d547849
            home = homeOfHomes.removeHome(bmd); // F743-26072
            // d608829.1 - The home might not exist if a failure occurred before
            // it was created. Do not throw another (meaningless) exception.
            if (home != null) {
                // PK65102 - Prior to this APAR defect, the call below to 'unregisterHome' was not performed for
                // an MDB home. This resulted in the home objects remaining in memory after the app containing the home
                // was stopped.
                try {
                    if (home.isMessageDrivenHome()) {
                        ((MDBInternalHome) home).deactivateEndpoint(); // LI2110.41
                    }
                } catch (Throwable e) { // d138839
                    FFDCFilter.processException(e, CLASS_NAME + ".uninstallBean", "988", this);
                }
                try {
                    // If the container is being terminated, cost of passivating a
                    // cache full of stateful beans could be quite expensive.
                    if (!terminate)
                        activator.uninstallBean(beanHomeName);
                } catch (Throwable e) {
                    FFDCFilter.processException(e, CLASS_NAME + ".uninstallBean", "1853", this);
                }
                home.destroy();
                // F743-1751 - Unregister the home from the wrapper manager after uninstalling the bean
                // from the activator and destroying the home since either could cause PreDestroy
                // lifecycle interceptors to be called, which could cause wrappers to be registered.
                try {
                    wrapperManager.unregisterHome(beanHomeName, home);
                } catch (Throwable e) { // d138839
                    FFDCFilter.processException(e, CLASS_NAME + ".uninstallBean", "999", this);
                }
            }
        } catch (Throwable t) {
            FFDCFilter.processException(t, CLASS_NAME + ".uninstallBean", "1430", this);
            ContainerEJBException ex = new ContainerEJBException("Failed to destroy the home.", t);
            Tr.error(tc, "CAUGHT_EXCEPTION_THROWING_NEW_EXCEPTION_CNTR0035E", new Object[] { t, ex.toString() });
            throw ex;
        } finally {
            // Always drop the metadata entry, even on failure.
            internalBeanMetaDataStore.remove(beanHomeName); // d146239.5
            if (isTraceOn && tc.isEntryEnabled())
                Tr.exit(tc, "uninstallBean");
        } // finally // 202018
    }
}
public class CharArrayList { /** * Copies element of this type - specific list into the given array using optimized system calls . * @ param from the start index ( inclusive ) . * @ param a the destination array . * @ param offset the offset into the destination array where to store the first element copied . * @ param length the number of elements to be copied . */ private void getElements ( final int from , final char [ ] a , final int offset , final int length ) { } }
CharArrays . ensureOffsetLength ( a , offset , length ) ; System . arraycopy ( this . a , from , a , offset , length ) ;
public class FileSystemView { /** * Snapshots the entries of the working directory of this view . */ public ImmutableSortedSet < Name > snapshotWorkingDirectoryEntries ( ) { } }
store . readLock ( ) . lock ( ) ; try { ImmutableSortedSet < Name > names = workingDirectory . snapshot ( ) ; workingDirectory . updateAccessTime ( ) ; return names ; } finally { store . readLock ( ) . unlock ( ) ; }
public class Hour { /** * Converts a given string into an instance of this class . * @ param str * String to convert . * @ return New instance . */ @ Nullable public static Hour valueOf ( @ Nullable final String str ) { } }
if ( str == null ) { return null ; } return new Hour ( str ) ;
public class EditShape { /** * Append a Geometry to the given geometry of the Edit _ shape */ void appendGeometry ( int dstGeometry , Geometry srcGeometry ) { } }
Geometry . Type gt = srcGeometry . getType ( ) ; if ( Geometry . isMultiPath ( gt . value ( ) ) ) { appendMultiPath_ ( dstGeometry , ( MultiPath ) srcGeometry ) ; return ; } else if ( gt . value ( ) == Geometry . GeometryType . MultiPoint ) { appendMultiPoint_ ( dstGeometry , ( MultiPoint ) srcGeometry ) ; return ; } throw GeometryException . GeometryInternalError ( ) ;
public class CircuitBreakerAspect { /** * the default Java types handling for the circuit breaker AOP */ private Object defaultHandling ( ProceedingJoinPoint proceedingJoinPoint , io . github . resilience4j . circuitbreaker . CircuitBreaker circuitBreaker ) throws Throwable { } }
return circuitBreaker . executeCheckedSupplier ( proceedingJoinPoint :: proceed ) ;
public class Server { /** * Map key ( host + / + context path ) and global scope name * @ param hostName * Host name * @ param contextPath * Context path * @ param globalName * Global scope name * @ return true if mapping was added , false if already exist */ public boolean addMapping ( String hostName , String contextPath , String globalName ) { } }
log . info ( "Add mapping global: {} host: {} context: {}" , new Object [ ] { globalName , hostName , contextPath } ) ; final String key = getKey ( hostName , contextPath ) ; log . debug ( "Add mapping: {} => {}" , key , globalName ) ; return ( mapping . putIfAbsent ( key , globalName ) == null ) ;
public class FedoraAPIMImpl { /** * ( non - Javadoc ) * @ see * org . fcrepo . server . management . FedoraAPIMMTOM # getDatastreamHistory ( java * . lang . String pid , ) String dsID ) * */ @ Override public List < org . fcrepo . server . types . gen . Datastream > getDatastreamHistory ( String pid , String dsID ) { } }
assertInitialized ( ) ; try { MessageContext ctx = context . getMessageContext ( ) ; org . fcrepo . server . storage . types . Datastream [ ] intDatastreams = m_management . getDatastreamHistory ( ReadOnlyContext . getSoapContext ( ctx ) , pid , dsID ) ; return getGenDatastreams ( intDatastreams ) ; } catch ( Throwable th ) { LOG . error ( "Error getting datastream history" , th ) ; throw CXFUtility . getFault ( th ) ; }
public class UpdateDataRetentionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateDataRetentionRequest updateDataRetentionRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateDataRetentionRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateDataRetentionRequest . getStreamName ( ) , STREAMNAME_BINDING ) ; protocolMarshaller . marshall ( updateDataRetentionRequest . getStreamARN ( ) , STREAMARN_BINDING ) ; protocolMarshaller . marshall ( updateDataRetentionRequest . getCurrentVersion ( ) , CURRENTVERSION_BINDING ) ; protocolMarshaller . marshall ( updateDataRetentionRequest . getOperation ( ) , OPERATION_BINDING ) ; protocolMarshaller . marshall ( updateDataRetentionRequest . getDataRetentionChangeInHours ( ) , DATARETENTIONCHANGEINHOURS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JsonReader { /** * Assigns { @ code nextToken } based on the value of { @ code nextValue } . */ private JsonToken decodeLiteral ( ) throws IOException { } }
if ( valuePos == - 1 ) { // it was too long to fit in the buffer so it can only be a string return JsonToken . STRING ; } else if ( valueLength == 4 && ( 'n' == buffer [ valuePos ] || 'N' == buffer [ valuePos ] ) && ( 'u' == buffer [ valuePos + 1 ] || 'U' == buffer [ valuePos + 1 ] ) && ( 'l' == buffer [ valuePos + 2 ] || 'L' == buffer [ valuePos + 2 ] ) && ( 'l' == buffer [ valuePos + 3 ] || 'L' == buffer [ valuePos + 3 ] ) ) { value = "null" ; return JsonToken . NULL ; } else if ( valueLength == 4 && ( 't' == buffer [ valuePos ] || 'T' == buffer [ valuePos ] ) && ( 'r' == buffer [ valuePos + 1 ] || 'R' == buffer [ valuePos + 1 ] ) && ( 'u' == buffer [ valuePos + 2 ] || 'U' == buffer [ valuePos + 2 ] ) && ( 'e' == buffer [ valuePos + 3 ] || 'E' == buffer [ valuePos + 3 ] ) ) { value = TRUE ; return JsonToken . BOOLEAN ; } else if ( valueLength == 5 && ( 'f' == buffer [ valuePos ] || 'F' == buffer [ valuePos ] ) && ( 'a' == buffer [ valuePos + 1 ] || 'A' == buffer [ valuePos + 1 ] ) && ( 'l' == buffer [ valuePos + 2 ] || 'L' == buffer [ valuePos + 2 ] ) && ( 's' == buffer [ valuePos + 3 ] || 'S' == buffer [ valuePos + 3 ] ) && ( 'e' == buffer [ valuePos + 4 ] || 'E' == buffer [ valuePos + 4 ] ) ) { value = FALSE ; return JsonToken . BOOLEAN ; } else { value = stringPool . get ( buffer , valuePos , valueLength ) ; return decodeNumber ( buffer , valuePos , valueLength ) ; }
public class ApiOvhOrder { /** * Get prices and contracts information * REST : GET / order / cdn / webstorage / { serviceName } / traffic * @ param bandwidth [ required ] Traffic in TB that will be added to the cdn . webstorage service * @ param serviceName [ required ] The internal name of your CDN Static offer */ public OvhOrder cdn_webstorage_serviceName_traffic_GET ( String serviceName , OvhOrderTrafficEnum bandwidth ) throws IOException { } }
String qPath = "/order/cdn/webstorage/{serviceName}/traffic" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "bandwidth" , bandwidth ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOrder . class ) ;
public class PropertyCatalogCheck { /** * Assuming that the currently analyzed file is located below the current working directory , this method returns a * new array of exactly < code > pNumSubdirs < / code > elements containing the simple names of the directories on the * path to the currently analyzed file , starting just below the current working directory . * @ param pNumSubdirs the number of subdirectory names to return . If fewer exist , they are padded with * < code > null < / code > * @ return the first n subdirs , where non - existing elements are < code > null < / code > */ @ Nonnull private String [ ] getFirstSubdirs ( final int pNumSubdirs ) { } }
String [ ] result = new String [ pNumSubdirs ] ; Arrays . fill ( result , null ) ; final File thisFile = Util . canonize ( getApiFixer ( ) . getCurrentFileName ( ) ) ; if ( thisFile . getPath ( ) . startsWith ( baseDir . getPath ( ) ) ) { final String relPath = thisFile . getPath ( ) . substring ( baseDir . getPath ( ) . length ( ) + 1 ) ; // incl . separator char final String [ ] pathElements = relPath . split ( Pattern . quote ( File . separator ) , pNumSubdirs + 1 ) ; int i = 0 ; for ( String elem : pathElements ) { if ( i < pNumSubdirs ) { result [ i ++ ] = elem ; } } } return result ;
public class Trie2Writable { /** * Produce an optimized , read - only Trie2_32 from this writable Trie . */ public Trie2_32 toTrie2_32 ( ) { } }
Trie2_32 frozenTrie = new Trie2_32 ( ) ; freeze ( frozenTrie , ValueWidth . BITS_32 ) ; return frozenTrie ;
public class AbstractRole {
    /**
     * Updates the term and leader.
     *
     * @return true when the term/leader were updated, false when the request was stale.
     */
    protected boolean updateTermAndLeader(long term, MemberId leader) {
        // Accept the update when the request carries a newer term, or when it names
        // a leader for the current term and none has been recorded yet.
        final boolean newerTerm = term > raft.getTerm();
        final boolean fillsLeaderForCurrentTerm =
                term == raft.getTerm() && raft.getLeader() == null && leader != null;
        if (!newerTerm && !fillsLeaderForCurrentTerm) {
            return false;
        }
        raft.setTerm(term);
        raft.setLeader(leader);
        // Reset the current cluster configuration to the last committed configuration
        // when a leader change occurs.
        raft.getCluster().reset();
        return true;
    }
}
public class GetInstance { /** * Return a List of all the available Services that implement any of * the specified algorithms . See getServices ( String , String ) for detals . */ public static List < Service > getServices ( List < ServiceId > ids ) { } }
ProviderList list = Providers . getProviderList ( ) ; return list . getServices ( ids ) ;
public class MuxServer { /** * Close the mux */ public void close ( ) throws IOException { } }
isClosed = true ; OutputStream os = this . os ; this . os = null ; InputStream is = this . is ; this . is = null ; if ( os != null ) os . close ( ) ; if ( is != null ) is . close ( ) ;
public class RouteTablesInner { /** * Updates a route table tags . * @ param resourceGroupName The name of the resource group . * @ param routeTableName The name of the route table . * @ param tags Resource tags . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < RouteTableInner > beginUpdateTagsAsync ( String resourceGroupName , String routeTableName , Map < String , String > tags , final ServiceCallback < RouteTableInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginUpdateTagsWithServiceResponseAsync ( resourceGroupName , routeTableName , tags ) , serviceCallback ) ;
public class DeviceImpl {
    /**
     * Command State: returns the device state.
     *
     * @return the current device state
     * @throws DevFailed if the state cannot be obtained
     */
    @Command(name = STATE_NAME, outTypeDesc = "Device state")
    public DevState executeStateCmd() throws DevFailed {
        // Restore the logging diagnostic context for this invocation.
        MDC.setContextMap(contextMap);
        // NOTE: entry/exit are both traced before getState() runs, so a DevFailed
        // thrown by getState() occurs after the exit trace — preserved as-is.
        xlogger.entry();
        xlogger.exit();
        return getState();
    }
}
public class FormulaWriter { /** * Writes a given formula to a file . * @ param fileName the file name of the file * @ param formula the formula to write * @ param splitAndMultiline indicates whether - if the formula is an conjunction - the single operands should be * written to different lines without a conjoining operator * @ throws IOException if there was a problem writing the file */ public static void write ( final String fileName , final Formula formula , final boolean splitAndMultiline ) throws IOException { } }
write ( new File ( fileName ) , formula , splitAndMultiline , formula . factory ( ) . stringRepresentation ( ) ) ;
public class ResourceBundlesHandlerImpl { /** * Returns the bundle iterator * @ param debugMode * the flag indicating if we are in debug mode or not * @ param commentCallbackHandler * the comment callback handler * @ param variants * the variant map * @ return the bundle iterator */ private ResourceBundlePathsIterator getBundleIterator ( DebugMode debugMode , List < JoinableResourceBundle > bundles , ConditionalCommentCallbackHandler commentCallbackHandler , Map < String , String > variants ) { } }
ResourceBundlePathsIterator bundlesIterator ; if ( debugMode . equals ( DebugMode . DEBUG ) ) { bundlesIterator = new DebugModePathsIteratorImpl ( bundles , commentCallbackHandler , variants ) ; } else if ( debugMode . equals ( DebugMode . FORCE_NON_DEBUG_IN_IE ) ) { bundlesIterator = new IECssDebugPathsIteratorImpl ( bundles , commentCallbackHandler , variants ) ; } else { bundlesIterator = new PathsIteratorImpl ( bundles , commentCallbackHandler , variants ) ; } return bundlesIterator ;
public class GatewayServlet {
    /**
     * Generic handler for all types of http actions/verbs. Reads the inbound
     * servlet request, hands it to the apiman engine for asynchronous policy
     * execution and proxying, streams the request/response bodies, and blocks
     * on a latch until the engine signals completion.
     *
     * @param req    the inbound servlet request
     * @param resp   the servlet response to populate
     * @param action the HTTP verb/action being handled
     */
    protected void doAction(final HttpServletRequest req, final HttpServletResponse resp, String action) {
        // Read the request.
        ApiRequest srequest;
        try {
            srequest = readRequest(req);
            srequest.setType(action);
        } catch (Exception e) {
            // The request could not even be parsed; report the error and stop.
            writeError(null, resp, e);
            return;
        }
        // The latch lets this (synchronous) servlet thread wait for the async engine.
        final CountDownLatch latch = new CountDownLatch(1);
        final ApiRequest finalRequest = srequest;
        // Now execute the request via the apiman engine
        IApiRequestExecutor executor = getEngine().executor(srequest, new IAsyncResultHandler<IEngineResult>() {
            @Override
            public void handle(IAsyncResult<IEngineResult> asyncResult) {
                if (asyncResult.isSuccess()) {
                    IEngineResult engineResult = asyncResult.getResult();
                    if (engineResult.isResponse()) {
                        try {
                            writeResponse(resp, engineResult.getApiResponse());
                            final ServletOutputStream outputStream = resp.getOutputStream();
                            // Stream each body chunk from the back-end API to the client.
                            engineResult.bodyHandler(new IAsyncHandler<IApimanBuffer>() {
                                @Override
                                public void handle(IApimanBuffer chunk) {
                                    try {
                                        if (chunk instanceof ByteBuffer) {
                                            byte[] buffer = (byte[]) chunk.getNativeBuffer();
                                            outputStream.write(buffer, 0, chunk.length());
                                        } else {
                                            outputStream.write(chunk.getBytes());
                                        }
                                    } catch (IOException e) {
                                        // This will get caught by the API connector, which will abort the
                                        // connection to the back-end API.
                                        throw new RuntimeException(e);
                                    }
                                }
                            });
                            // End-of-body: flush to the client and release the waiting thread.
                            engineResult.endHandler(new IAsyncHandler<Void>() {
                                @Override
                                public void handle(Void result) {
                                    try {
                                        resp.flushBuffer();
                                    } catch (IOException e) {
                                        // This will get caught by the API connector, which will abort the
                                        // connection to the back-end API.
                                        throw new RuntimeException(e);
                                    } finally {
                                        latch.countDown();
                                    }
                                }
                            });
                        } catch (IOException e) {
                            // this would mean we couldn't get the output stream from the response, so we
                            // need to abort the engine result (which will let the back-end connection
                            // close down).
                            engineResult.abort(e);
                            latch.countDown();
                            throw new RuntimeException(e);
                        }
                    } else {
                        // Policy failure: no back-end response to stream.
                        writeFailure(finalRequest, resp, engineResult.getPolicyFailure());
                        latch.countDown();
                    }
                } else {
                    writeError(finalRequest, resp, asyncResult.getError());
                    latch.countDown();
                }
            }
        });
        // Pump the inbound request body into the back-end connector stream.
        executor.streamHandler(new IAsyncHandler<ISignalWriteStream>() {
            @Override
            public void handle(ISignalWriteStream connectorStream) {
                try {
                    final InputStream is = req.getInputStream();
                    ByteBuffer buffer = new ByteBuffer(2048);
                    int numBytes = buffer.readFrom(is);
                    while (numBytes != -1) {
                        connectorStream.write(buffer);
                        numBytes = buffer.readFrom(is);
                    }
                    connectorStream.end();
                } catch (Throwable e) {
                    connectorStream.abort(e);
                }
            }
        });
        executor.execute();
        // Block until one of the completion paths above counts down the latch.
        try {
            latch.await();
        } catch (InterruptedException e) {
        }
    }
}
public class DecadeSpellingFilter { /** * Gets the Roman notation for numbers . * @ param num the integer to convert * @ return the String using the number . */ private String getRomanNumber ( int num ) { } }
String roman = "" ; // The roman numeral . int N = num ; // N represents the part of num that still has // to be converted to Roman numeral representation . for ( int i = 0 ; i < numbers . length ; i ++ ) { while ( N >= numbers [ i ] ) { roman += letters [ i ] ; N -= numbers [ i ] ; } } return roman ;
public class DefaultFileIO { /** * Read a XML document and return a projection to it . * @ param projectionInterface * @ return a new projection pointing to the content of the file . * @ throws IOException */ @ Override public < T > T read ( final Class < T > projectionInterface ) throws IOException { } }
try { Document document = projector . config ( ) . createDocumentBuilder ( ) . parse ( file ) ; return projector . projectDOMNode ( document , projectionInterface ) ; } catch ( SAXException e ) { throw new XBDocumentParsingException ( e ) ; }
public class AbstractGreenPepperMacro { /** * < p > getPageTitle . < / p > * @ param parameters a { @ link java . util . Map } object . * @ param renderContext a { @ link com . atlassian . renderer . RenderContext } object . * @ param spaceKey a { @ link java . lang . String } object . * @ return a { @ link java . lang . String } object . * @ throws com . greenpepper . server . GreenPepperServerException if any . */ @ SuppressWarnings ( "unchecked" ) protected String getPageTitle ( @ SuppressWarnings ( "rawtypes" ) Map parameters , RenderContext renderContext , String spaceKey ) throws GreenPepperServerException { } }
return getPage ( parameters , renderContext , spaceKey ) . getTitle ( ) . trim ( ) ;
public class WSJdbcDataSource { /** * Locate the underlying JDBC driver ' s implementation of the specified interface . * @ param interfaceClass the interface . * @ return the underlying JDBC driver ' s implementation of the specified interface , * or NULL if none is found . * @ throws SQLException if an error occurs locating or unwrapping the implementation . */ @ Override @ SuppressWarnings ( "unchecked" ) protected < T > T getJDBCImplObject ( Class < T > interfaceClass ) throws SQLException { } }
Object jdbcImplObject = getJDBCImplObject ( ) ; if ( jdbcImplObject == null ) { return null ; } Object impl = WSJdbcTracer . getImpl ( jdbcImplObject ) ; return interfaceClass . isInstance ( impl ) ? ( T ) impl : null ;
public class DescribeVpcPeeringAuthorizationsResult { /** * Collection of objects that describe all valid VPC peering operations for the current AWS account . * @ param vpcPeeringAuthorizations * Collection of objects that describe all valid VPC peering operations for the current AWS account . */ public void setVpcPeeringAuthorizations ( java . util . Collection < VpcPeeringAuthorization > vpcPeeringAuthorizations ) { } }
if ( vpcPeeringAuthorizations == null ) { this . vpcPeeringAuthorizations = null ; return ; } this . vpcPeeringAuthorizations = new java . util . ArrayList < VpcPeeringAuthorization > ( vpcPeeringAuthorizations ) ;
public class AnalysisContext { /** * Load an interprocedural property database . * @ param < DatabaseType > * actual type of the database * @ param < KeyType > * type of key ( e . g . , method or field ) * @ param < Property > * type of properties stored in the database * @ param database * the empty database object * @ param fileName * file to load database from * @ param description * description of the database ( for diagnostics ) * @ return the database object , or null if the database couldn ' t be loaded */ public < DatabaseType extends PropertyDatabase < KeyType , Property > , KeyType extends FieldOrMethodDescriptor , Property > DatabaseType loadPropertyDatabase ( DatabaseType database , String fileName , String description ) { } }
try { File dbFile = new File ( getDatabaseInputDir ( ) , fileName ) ; if ( DEBUG ) { System . out . println ( "Loading " + description + " from " + dbFile . getPath ( ) + "..." ) ; } database . readFromFile ( dbFile . getPath ( ) ) ; return database ; } catch ( IOException e ) { getLookupFailureCallback ( ) . logError ( "Error loading " + description , e ) ; } catch ( PropertyDatabaseFormatException e ) { getLookupFailureCallback ( ) . logError ( "Invalid " + description , e ) ; } return null ;
public class SibRaEngineComponent { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . admin . JsEngineComponent # initialize ( com . ibm . ws . sib . admin . JsMessagingEngine ) */ public void initialize ( final JsMessagingEngine engine ) { } }
final String methodName = "initialize" ; if ( TRACE . isEntryEnabled ( ) ) { SibTr . entry ( this , TRACE , methodName , engine ) ; } final Set listeners ; _messagingEngine = engine ; synchronized ( MESSAGING_ENGINES ) { // Do we already have a set of initialized MEs on this bus ? Set messagingEngines = ( Set ) MESSAGING_ENGINES . get ( engine . getBusName ( ) ) ; // If not , create a new set and add it to the map if ( messagingEngines == null ) { messagingEngines = new HashSet ( ) ; MESSAGING_ENGINES . put ( engine . getBusName ( ) , messagingEngines ) ; } // Add the initializing ME to the set messagingEngines . add ( engine ) ; // Get listeners to notify listeners = getListeners ( _messagingEngine . getBusName ( ) ) ; } // Notify listeners for ( final Iterator iterator = listeners . iterator ( ) ; iterator . hasNext ( ) ; ) { final SibRaMessagingEngineListener listener = ( SibRaMessagingEngineListener ) iterator . next ( ) ; listener . messagingEngineInitializing ( _messagingEngine ) ; } if ( TRACE . isEntryEnabled ( ) ) { SibTr . exit ( this , TRACE , methodName ) ; }
public class JsFacade { /** * Dialogs */ @ UsedByApp public void bindDialogs ( JsDisplayListCallback < JsDialog > callback ) { } }
if ( callback == null ) { return ; } messenger . getSharedDialogList ( ) . subscribe ( callback , false ) ;
public class AzureClient {
    /**
     * Polls from the 'Azure-AsyncOperation' header and updates the polling
     * state with the polling response.
     *
     * @param pollingState the polling state for the current operation.
     * @param <T>          the return type of the caller.
     * @return an observable emitting the updated polling state.
     */
    private <T> Observable<PollingState<T>> updateStateFromAzureAsyncOperationHeaderOnPutAsync(final PollingState<T> pollingState) {
        // Issue one poll against the async-operation URI and map the raw HTTP
        // response into an updated polling state.
        return pollAsync(pollingState.azureAsyncOperationHeaderLink(), pollingState.loggingContext())
                .flatMap(new Func1<Response<ResponseBody>, Observable<PollingState<T>>>() {
                    @Override
                    public Observable<PollingState<T>> call(Response<ResponseBody> response) {
                        final AzureAsyncOperation asyncOperation;
                        try {
                            asyncOperation = AzureAsyncOperation.fromResponse(restClient().serializerAdapter(), response);
                        } catch (CloudException exception) {
                            // Body could not be deserialized; propagate the failure on the stream.
                            return Observable.error(exception);
                        }
                        // Copy status and error details from the async-operation body
                        // into the polling state, and record the raw response.
                        pollingState.withStatus(asyncOperation.status());
                        pollingState.withErrorBody(asyncOperation.getError());
                        pollingState.withResponse(response);
                        // Clear any previously captured resource; this poll carries no resource body here.
                        pollingState.withResource(null);
                        return Observable.just(pollingState);
                    }
                });
    }
}
public class JsMsgObject {
    /**
     * Encodes the message into a single DataSlice.
     * The DataSlice will be used by the Comms component to transmit the message
     * over the wire. This method may only be used for a single-part message
     * (i.e. one with no payload part).
     *
     * @param conn the CommsConnection over which this encoded message will be sent.
     *             This may be null if the message is not really being encoded for transmission.
     * @return the DataSlice which contains the encoded message
     * @exception MessageEncodeFailedException is thrown if the message failed to encode.
     */
    DataSlice encodeSinglePartMessage(Object conn) throws MessageEncodeFailedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "encodeSinglePartMessage");
        // We must only use this method if the message does not have a payload part
        if (payloadPart != null) {
            MessageEncodeFailedException mefe = new MessageEncodeFailedException("Invalid call to encodeSinglePartMessage");
            FFDCFilter.processException(mefe, "com.ibm.ws.sib.mfp.impl.JsMsgObject.encodeSinglePartMessage", "jmo530",
                    this, new Object[] { MfpConstants.DM_MESSAGE, headerPart.jmfPart, theMessage });
            throw mefe;
        }
        // The 'conn' parameter (if supplied) is passed as an Object (for no good reason except the
        // daft build system), but it has to be a CommsConnection instance.
        if (conn != null && !(conn instanceof CommsConnection)) {
            throw new MessageEncodeFailedException("Incorrect connection object: " + conn.getClass());
        }
        byte[] buffer = null;
        // Encoding is handled by JMF. This will be a very cheap operation if JMF already has the
        // message in assembled form (for example if it was previously decoded from a byte buffer and
        // has had no major changes).
        try {
            // We need to lock the message around the call to updateDataFields() and the
            // actual encode of the part(s), so that noone can update any JMF message data
            // during that time (because they can not get the hdr2, api, or payload part).
            // Otherwise it is possible to get an inconsistent view of the message with some updates
            // included but those to the cached values 'missing'.
            // It is still strictly possible for the top-level schema header fields to be
            // updated, but this will not happen to any fields visible to an app.
            synchronized (theMessage) {
                // Ensure any cached message data is written back
                theMessage.updateDataFields(MfpConstants.UDF_ENCODE);
                // We need to check if the receiver has all the necessary schema definitions to be able
                // to decode this message and pre-send any that are missing.
                ensureReceiverHasSchemata((CommsConnection) conn);
                // Synchronize this section on the JMF Message, as otherwise the length could
                // change between allocating the array & encoding the header part into it.
                synchronized (getPartLockArtefact(headerPart)) {
                    // Allocate a buffer for the Ids and the message part
                    buffer = new byte[IDS_LENGTH + ArrayUtil.INT_SIZE + ((JMFMessage) headerPart.jmfPart).getEncodedLength()];
                    // Write the Ids to the buffer
                    int offset = encodeIds(buffer, 0);
                    // Write the header part to the buffer & add it to the buffer list
                    encodePartToBuffer(headerPart, true, buffer, offset);
                }
            }
        } catch (MessageEncodeFailedException e1) {
            // This will have been thrown by encodePartToBuffer() which will
            // already have dumped the appropriate message part.
            FFDCFilter.processException(e1, "com.ibm.ws.sib.mfp.impl.JsMsgObject.encodeSinglePartMessage", "jmo500",
                    this, new Object[] { MfpConstants.DM_MESSAGE, null, theMessage });
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "encodeSinglePartMessage failed: " + e1);
            throw e1;
        } catch (Exception e) {
            // This is most likely to be thrown by the getEncodedLength() call on the header
            // so we pass the header part to the diagnostic module.
            FFDCFilter.processException(e, "com.ibm.ws.sib.mfp.impl.JsMsgObject.encodeSinglePartMessage", "jmo520",
                    this, new Object[] { MfpConstants.DM_MESSAGE, headerPart.jmfPart, theMessage });
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(this, tc, "encodeSinglePartMessage failed: " + e);
            throw new MessageEncodeFailedException(e);
        }
        // If debug trace, dump the buffer before returning
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, "Encoded JMF Message", debugMsg());
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            SibTr.debug(this, tc, "buffers: ", SibTr.formatBytes(buffer, 0, buffer.length));
        }
        // Wrap the buffer in a DataSlice to return
        DataSlice slice = new DataSlice(buffer);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "encodeSinglePartMessage", slice);
        return slice;
    }
}
public class AnimaQuery { /** * Build a select statement . * @ param addOrderBy add the order by clause . * @ return select sql */ private String buildSelectSQL ( boolean addOrderBy ) { } }
SQLParams sqlParams = SQLParams . builder ( ) . modelClass ( this . modelClass ) . selectColumns ( this . selectColumns ) . tableName ( this . tableName ) . pkName ( this . primaryKeyColumn ) . conditionSQL ( this . conditionSQL ) . excludedColumns ( this . excludedColumns ) . isSQLLimit ( isSQLLimit ) . build ( ) ; ifThen ( addOrderBy , ( ) -> sqlParams . setOrderBy ( this . orderBySQL . toString ( ) ) ) ; return Anima . of ( ) . dialect ( ) . select ( sqlParams ) ;
public class BaseJavaFileManager {
    /**
     * {@inheritDoc}
     *
     * Always returns the single pre-configured compiled file object; the
     * location, qualified name, kind and sibling output file parameters are
     * all deliberately ignored.
     */
    @Override
    public JavaFileObject getJavaFileForOutput(final Location location, final String qualifiedName, final Kind kind,
            final FileObject outputFile) {
        // All compiler output is funneled into this one file object.
        return this.compiledJavaFileObject;
    }
}
public class MediaDefault { /** * Create media from file . * @ param prefix The prefix path . * @ param prefixLength The prefix length . * @ param file The file to created as media . * @ return The created media . */ private Media create ( String prefix , int prefixLength , File file ) { } }
final String currentPath = file . getPath ( ) ; final String [ ] systemPath = SLASH . split ( currentPath . substring ( currentPath . indexOf ( prefix ) + prefixLength ) . replace ( File . separator , Constant . SLASH ) ) ; final Media media ; if ( loader . isPresent ( ) ) { media = new MediaDefault ( separator , loader . get ( ) , UtilFolder . getPathSeparator ( separator , systemPath ) ) ; } else { media = new MediaDefault ( separator , resourcesDir , UtilFolder . getPathSeparator ( separator , systemPath ) ) ; } return media ;
public class OutputGate { public void initializeChannels ( GateDeploymentDescriptor descriptor ) { } }
int numChannels = descriptor . getNumberOfChannelDescriptors ( ) ; this . channels = new OutputChannel [ numChannels ] ; setChannelType ( descriptor . getChannelType ( ) ) ; for ( int i = 0 ; i < numChannels ; i ++ ) { ChannelDeploymentDescriptor channelDescriptor = descriptor . getChannelDescriptor ( i ) ; ChannelID id = channelDescriptor . getOutputChannelID ( ) ; ChannelID connectedId = channelDescriptor . getInputChannelID ( ) ; this . channels [ i ] = new OutputChannel ( this , i , id , connectedId , getChannelType ( ) ) ; }
public class BaseNDArrayFactory { /** * Random normal using the given rng * @ param rows the number of rows in the matrix * @ param columns the number of columns in the matrix * @ param r the random generator to use * @ return */ @ Override public INDArray randn ( long rows , long columns , org . nd4j . linalg . api . rng . Random r ) { } }
return randn ( new long [ ] { rows , columns } , r ) ;
public class DocumentFormats {
    /**
     * Writes, reads, and deletes an example document of the given format.
     *
     * @param client   the database client to operate through
     * @param filename the example file name under the data directory
     * @param format   the document format name (used for reporting only)
     * @throws IOException if the example document cannot be opened
     */
    public static void writeReadDeleteDocument(DatabaseClient client, String filename, String format) throws IOException {
        // create a manager for documents of any format
        GenericDocumentManager docMgr = client.newDocumentManager();
        InputStream docStream = Util.openStream("data" + File.separator + filename);
        if (docStream == null) {
            throw new IOException("Could not read document example");
        }
        // create an identifier for the document
        String docId = "/example/" + filename;
        // write the document from a stream-backed handle
        InputStreamHandle writeHandle = new InputStreamHandle();
        writeHandle.set(docStream);
        docMgr.write(docId, writeHandle);
        // read the document content back into a byte-backed handle
        BytesHandle readHandle = new BytesHandle();
        docMgr.read(docId, readHandle);
        byte[] document = readHandle.get();
        // ... do something with the document content ...
        // delete the document
        docMgr.delete(docId);
        System.out.println("Wrote, read, and deleted /example/" + filename + " content with " + document.length
                + " bytes in the " + format + " format");
    }
}
public class RandomDateUtils { /** * Returns a random { @ link Instant } that is after the given { @ link Instant } . * @ param after the value that returned { @ link Instant } must be after * @ return the random { @ link Instant } * @ throws IllegalArgumentException if after is null or if after is equal to or after { @ link * RandomDateUtils # MAX _ INSTANT } */ public static Instant randomInstantAfter ( Instant after ) { } }
checkArgument ( after != null , "After must be non-null" ) ; checkArgument ( after . isBefore ( MAX_INSTANT ) , "Cannot produce date after %s" , MAX_INSTANT ) ; return randomInstant ( after . plus ( 1 , MILLIS ) , MAX_INSTANT ) ;
public class UCharacterUtility { /** * Determines if codepoint is a non character * @ param ch codepoint * @ return true if codepoint is a non character false otherwise */ public static boolean isNonCharacter ( int ch ) { } }
if ( ( ch & NON_CHARACTER_SUFFIX_MIN_3_0_ ) == NON_CHARACTER_SUFFIX_MIN_3_0_ ) { return true ; } return ch >= NON_CHARACTER_MIN_3_1_ && ch <= NON_CHARACTER_MAX_3_1_ ;
public class NodeSet {
    /**
     * Tests whether the selector string matches the item, in three possible ways: first, if the
     * selector looks like "/.../" then the outer '/' chars are removed and it is treated as a
     * regular expression *only* and PatternSyntaxExceptions are not caught. Otherwise, it is
     * treated as a regular expression and any PatternSyntaxExceptions are caught. If it does not
     * match or if the pattern is invalid, it is tested for string equality with the input.
     *
     * @param inputSelector test string which may be a regular expression, or explicit regular
     *     expression string wrapped in '/' characters
     * @param item item to test
     * @return true if the item matches the selector
     */
    public static boolean matchRegexOrEquals(final String inputSelector, final String item) {
        final int len = inputSelector.length();
        // Explicit regex form: wrapped in '/' delimiters. Strip them and match;
        // any PatternSyntaxException intentionally propagates to the caller.
        if (len >= 2 && inputSelector.charAt(0) == '/' && inputSelector.charAt(len - 1) == '/') {
            final String explicitRegex = inputSelector.substring(1, len - 1);
            return item.matches(explicitRegex.trim());
        }

        final String trimmedSelector = inputSelector.trim();
        boolean regexMatched = false;
        try {
            regexMatched = item.matches(trimmedSelector);
        } catch (PatternSyntaxException ignored) {
            // Not a valid regex; fall through to the equality test below.
        }
        return regexMatched || trimmedSelector.equals(item);
    }
}
public class IMatrix { /** * Multiplikation from two matrices */ public IMatrix mul ( IMatrix b ) { } }
IMatrix result = new IMatrix ( rows , columns ) ; mul ( b , result ) ; return result ;
public class BlockBasedDataStore { /** * ( non - Javadoc ) * @ see net . timewalker . ffmq4 . storage . data . impl . AbstractBlockBasedDataStore # writeAllocationBlock ( int ) */ @ Override protected void writeAllocationBlock ( int blockIndex ) throws DataStoreException { } }
byte [ ] allocationBlock = serializeAllocationBlock ( blockIndex ) ; try { allocationTableRandomAccessFile . seek ( AT_HEADER_SIZE + blockIndex * AT_BLOCK_SIZE ) ; allocationTableRandomAccessFile . write ( allocationBlock ) ; } catch ( IOException e ) { throw new DataStoreException ( "Cannot write to allocation table file : " + allocationTableFile . getAbsolutePath ( ) , e ) ; }
public class SQLParserEngine { /** * Parse SQL to abstract syntax tree . * @ return abstract syntax tree of SQL */ public SQLAST parse ( ) { } }
ParseTree parseTree = SQLParserFactory . newInstance ( databaseType , sql ) . execute ( ) . getChild ( 0 ) ; if ( parseTree instanceof ErrorNode ) { throw new SQLParsingUnsupportedException ( String . format ( "Unsupported SQL of `%s`" , sql ) ) ; } Optional < SQLStatementRule > sqlStatementRule = parsingRuleRegistry . findSQLStatementRule ( databaseType , parseTree . getClass ( ) . getSimpleName ( ) ) ; if ( sqlStatementRule . isPresent ( ) ) { return new SQLAST ( ( ParserRuleContext ) parseTree , sqlStatementRule . get ( ) ) ; } if ( parsingRuleRegistry instanceof EncryptParsingRuleRegistry ) { return new SQLAST ( ( ParserRuleContext ) parseTree ) ; } throw new SQLParsingUnsupportedException ( String . format ( "Unsupported SQL of `%s`" , sql ) ) ;
public class GeometryUtils { /** * Calculate the surface tangent for the three supplied vertices and UV coordinates and store the result in < code > dest < / code > . * @ param v1 * XYZ of first vertex * @ param uv1 * UV of first vertex * @ param v2 * XYZ of second vertex * @ param uv2 * UV of second vertex * @ param v3 * XYZ of third vertex * @ param uv3 * UV of third vertex * @ param dest * the tangent will be stored here */ public static void tangent ( Vector3fc v1 , Vector2fc uv1 , Vector3fc v2 , Vector2fc uv2 , Vector3fc v3 , Vector2fc uv3 , Vector3f dest ) { } }
float DeltaV1 = uv2 . y ( ) - uv1 . y ( ) ; float DeltaV2 = uv3 . y ( ) - uv1 . y ( ) ; float f = 1.0f / ( ( uv2 . x ( ) - uv1 . x ( ) ) * DeltaV2 - ( uv3 . x ( ) - uv1 . x ( ) ) * DeltaV1 ) ; dest . x = f * ( DeltaV2 * ( v2 . x ( ) - v1 . x ( ) ) - DeltaV1 * ( v3 . x ( ) - v1 . x ( ) ) ) ; dest . y = f * ( DeltaV2 * ( v2 . y ( ) - v1 . y ( ) ) - DeltaV1 * ( v3 . y ( ) - v1 . y ( ) ) ) ; dest . z = f * ( DeltaV2 * ( v2 . z ( ) - v1 . z ( ) ) - DeltaV1 * ( v3 . z ( ) - v1 . z ( ) ) ) ; dest . normalize ( ) ;
public class NodeUtil { /** * Records a mapping of names to vars of everything reachable in a function . Should only be called * with a function scope . Does not enter new control flow areas aka embedded functions . * @ param nameVarMap an empty map that gets populated with the keys being variable names and * values being variable objects * @ param orderedVars an empty list that gets populated with variable objects in the order that * they appear in the fn */ static void getAllVarsDeclaredInFunction ( final Map < String , Var > nameVarMap , final List < Var > orderedVars , AbstractCompiler compiler , ScopeCreator scopeCreator , final Scope scope ) { } }
checkState ( nameVarMap . isEmpty ( ) ) ; checkState ( orderedVars . isEmpty ( ) ) ; checkState ( scope . isFunctionScope ( ) , scope ) ; ScopedCallback finder = new ScopedCallback ( ) { @ Override public void enterScope ( NodeTraversal t ) { Scope currentScope = t . getScope ( ) ; for ( Var v : currentScope . getVarIterable ( ) ) { nameVarMap . put ( v . getName ( ) , v ) ; orderedVars . add ( v ) ; } } @ Override public void exitScope ( NodeTraversal t ) { } @ Override public final boolean shouldTraverse ( NodeTraversal t , Node n , Node parent ) { // Don ' t enter any new functions return ! n . isFunction ( ) || n == scope . getRootNode ( ) ; } @ Override public void visit ( NodeTraversal t , Node n , Node parent ) { } } ; NodeTraversal t = new NodeTraversal ( compiler , finder , scopeCreator ) ; t . traverseAtScope ( scope ) ;
public class CreateDeploymentGroupRequest { /** * The target Amazon ECS services in the deployment group . This applies only to deployment groups that use the * Amazon ECS compute platform . A target Amazon ECS service is specified as an Amazon ECS cluster and service name * pair using the format < code > & lt ; clustername & gt ; : & lt ; servicename & gt ; < / code > . * @ return The target Amazon ECS services in the deployment group . This applies only to deployment groups that use * the Amazon ECS compute platform . A target Amazon ECS service is specified as an Amazon ECS cluster and * service name pair using the format < code > & lt ; clustername & gt ; : & lt ; servicename & gt ; < / code > . */ public java . util . List < ECSService > getEcsServices ( ) { } }
if ( ecsServices == null ) { ecsServices = new com . amazonaws . internal . SdkInternalList < ECSService > ( ) ; } return ecsServices ;
public class SpotFleetRequestConfigData { /** * The launch template and overrides . If you specify < code > LaunchTemplateConfigs < / code > , you can ' t specify * < code > LaunchSpecifications < / code > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setLaunchTemplateConfigs ( java . util . Collection ) } or * { @ link # withLaunchTemplateConfigs ( java . util . Collection ) } if you want to override the existing values . * @ param launchTemplateConfigs * The launch template and overrides . If you specify < code > LaunchTemplateConfigs < / code > , you can ' t specify * < code > LaunchSpecifications < / code > . * @ return Returns a reference to this object so that method calls can be chained together . */ public SpotFleetRequestConfigData withLaunchTemplateConfigs ( LaunchTemplateConfig ... launchTemplateConfigs ) { } }
if ( this . launchTemplateConfigs == null ) { setLaunchTemplateConfigs ( new com . amazonaws . internal . SdkInternalList < LaunchTemplateConfig > ( launchTemplateConfigs . length ) ) ; } for ( LaunchTemplateConfig ele : launchTemplateConfigs ) { this . launchTemplateConfigs . add ( ele ) ; } return this ;
public class AcidicGroupCountDescriptor { /** * { @ inheritDoc } */ @ Override public Object getParameterType ( String name ) { } }
Object object = null ; if ( name . equals ( "checkAromaticity" ) ) object = true ; return ( object ) ;
public class PasswordPolicyService {
    /**
     * Returns the age of the given user's password, in days. The age of a user's password is the
     * amount of time elapsed since the password was last changed or reset.
     *
     * @param user The user to calculate the password age of.
     * @return The age of the given user's password, in days.
     */
    private long getPasswordAge(ModeledUser user) {
        PasswordRecordModel record = user.getPasswordRecord();

        // With no recorded password, treat the age as zero.
        if (record == null)
            return 0;

        // Days elapsed between the last reset and now.
        long elapsedMillis = System.currentTimeMillis() - record.getPasswordDate().getTime();
        return TimeUnit.DAYS.convert(elapsedMillis, TimeUnit.MILLISECONDS);
    }
}
public class DogmaApi { /** * Get dynamic item information Returns info about a dynamic item resulting * from mutation with a mutaplasmid . - - - This route expires daily at 11:05 * @ param itemId * item _ id integer ( required ) * @ param typeId * type _ id integer ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ return ApiResponse & lt ; DogmaDynamicItemsResponse & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public ApiResponse < DogmaDynamicItemsResponse > getDogmaDynamicItemsTypeIdItemIdWithHttpInfo ( Long itemId , Integer typeId , String datasource , String ifNoneMatch ) throws ApiException { } }
com . squareup . okhttp . Call call = getDogmaDynamicItemsTypeIdItemIdValidateBeforeCall ( itemId , typeId , datasource , ifNoneMatch , null ) ; Type localVarReturnType = new TypeToken < DogmaDynamicItemsResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class RobotMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param robot the Robot model object to serialize; must not be null
     * @param protocolMarshaller receives each field value with its protocol binding
     */
    public void marshall(Robot robot, ProtocolMarshaller protocolMarshaller) {
        if (robot == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit every Robot field with its binding; null-valued fields are
            // presumably skipped by the protocol marshaller — TODO confirm.
            protocolMarshaller.marshall(robot.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(robot.getName(), NAME_BINDING);
            protocolMarshaller.marshall(robot.getFleetArn(), FLEETARN_BINDING);
            protocolMarshaller.marshall(robot.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(robot.getGreenGrassGroupId(), GREENGRASSGROUPID_BINDING);
            protocolMarshaller.marshall(robot.getCreatedAt(), CREATEDAT_BINDING);
            protocolMarshaller.marshall(robot.getArchitecture(), ARCHITECTURE_BINDING);
            protocolMarshaller.marshall(robot.getLastDeploymentJob(), LASTDEPLOYMENTJOB_BINDING);
            protocolMarshaller.marshall(robot.getLastDeploymentTime(), LASTDEPLOYMENTTIME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class VcfWriter { /** * Write VCF column header with the specified print writer . * @ param samples zero or more VCF samples , must not be null * @ param writer print writer to write VCF with , must not be null */ public static void writeColumnHeader ( final List < VcfSample > samples , final PrintWriter writer ) { } }
checkNotNull ( samples ) ; checkNotNull ( writer ) ; StringBuilder sb = new StringBuilder ( "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO" ) ; if ( ! samples . isEmpty ( ) ) { sb . append ( "\tFORMAT" ) ; } for ( VcfSample sample : samples ) { sb . append ( "\t" ) ; sb . append ( sample . getId ( ) ) ; } writer . println ( sb . toString ( ) ) ;
public class SettingsBean { /** * helper methods used in Spring xml files with SpEL */ public String getMailServerIp ( ) { } }
Settings settings = getSettings ( ) ; if ( settings . getMailServer ( ) == null ) { return "127.0.0.1" ; } return settings . getMailServer ( ) . getIp ( ) ;
public class Types { /** * Resolve the given base type and its members . * @ param type * the base type to resolve * @ param typeParameters * the type parameters * @ return * resolve type instance */ @ NonNull public static ResolvedTypeWithMembers resolveMembers ( @ NonNull Type type , @ NonNull Type ... typeParameters ) { } }
ResolvedType rt = typeResolver . resolve ( type , typeParameters ) ; return memberResolver . resolve ( rt , null , null ) ;
public class TimeSpan { /** * Returns { @ code true } if the end date occurs after the start date during * the period of time represented by this time span . */ public boolean insideRange ( long startDate , long endDate ) { } }
Calendar c1 = Calendar . getInstance ( ) ; Calendar c2 = Calendar . getInstance ( ) ; c1 . setTimeInMillis ( startDate ) ; c2 . setTimeInMillis ( endDate ) ; return isInRange ( c1 , c2 ) ;
public class Postcard {
    /**
     * Inserts an int value into the mapping of this Bundle, replacing any existing value for the
     * given key.
     *
     * @param key a String, or null
     * @param value an int
     * @return current Postcard, enabling fluent chaining
     */
    public Postcard withInt(@Nullable String key, int value) {
        // Delegate storage to the backing Bundle; return this for chaining.
        mBundle.putInt(key, value);
        return this;
    }
}
public class Messages { /** * Wait for a specific message . * Returns as soon as an message matching the specified search criteria is found . * @ param server The identifier of the server hosting the message . * @ param criteria The search criteria to use in order to find a match . * @ throws MailosaurException thrown if the request is rejected by server * @ throws IOException * @ return the Message object if successful . */ public Message waitFor ( String server , SearchCriteria criteria ) throws IOException , MailosaurException { } }
HashMap < String , String > query = new HashMap < String , String > ( ) ; query . put ( "server" , server ) ; return client . request ( "POST" , "api/messages/await" , criteria , query ) . parseAs ( Message . class ) ;
public class QrHelperFunctions_DSCC { /** * Performs a rank - 1 update operation on the submatrix specified by V with the multiply on the right . < br > * < br > * C = ( I - & gamma ; * v * v < sup > T < / sup > ) * A < br > * The order that matrix multiplies are performed has been carefully selected * to minimize the number of operations . * Before this can become a truly generic operation the submatrix specification needs * to be made more generic . */ public static void rank1UpdateMultR ( DMatrixSparseCSC V , int colV , double gamma , DMatrixSparseCSC A , DMatrixSparseCSC C , IGrowArray gw , DGrowArray gx ) { } }
if ( V . numRows != A . numRows ) throw new IllegalArgumentException ( "Number of rows in V and A must match" ) ; C . nz_length = 0 ; C . numRows = V . numRows ; C . numCols = 0 ; for ( int i = 0 ; i < A . numCols ; i ++ ) { double tau = CommonOps_DSCC . dotInnerColumns ( V , colV , A , i , gw , gx ) ; ImplCommonOps_DSCC . addColAppend ( 1.0 , A , i , - gamma * tau , V , colV , C , gw ) ; }
public class LocalSymbolTable {
    /**
     * NOT SYNCHRONIZED! Call within constructor or from synch'd method.
     *
     * Interns the given symbol name into this local table and returns its symbol id (sid), or -1
     * when the name is null (a null entry still occupies a slot, preserving positional sids).
     */
    int putSymbol(String symbolName) {
        if (isReadOnly) {
            throw new ReadOnlyValueException(SymbolTable.class);
        }
        // Grow the backing array when full: double the size, with DEFAULT_CAPACITY as a floor.
        if (mySymbolsCount == mySymbolNames.length) {
            int newlen = mySymbolsCount * 2;
            if (newlen < DEFAULT_CAPACITY) {
                newlen = DEFAULT_CAPACITY;
            }
            String[] temp = new String[newlen];
            System.arraycopy(mySymbolNames, 0, temp, 0, mySymbolsCount);
            mySymbolNames = temp;
        }
        int sid = -1;
        if (symbolName != null) {
            // Local sids are offset from the first local sid by the slot index.
            sid = mySymbolsCount + myFirstLocalSid;
            assert sid == getMaxId() + 1;
            // NOTE(review): putToMapIfNotThere presumably keeps an existing mapping, so a
            // duplicate name retains its earlier sid — confirm against that helper.
            putToMapIfNotThere(mySymbolsMap, symbolName, sid);
        }
        mySymbolNames[mySymbolsCount] = symbolName;
        mySymbolsCount++;
        return sid;
    }
}