signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ReplicationTrigger {

    /**
     * Deletes the replica entry with the replication delete trigger disabled,
     * so the delete is not itself replicated.
     *
     * @param replica the replica entry to delete
     * @return true if the replica was deleted
     * @throws PersistException if the delete fails
     */
    boolean tryDeleteReplica(Storable replica) throws PersistException {
        // Disable the delete trigger for this thread while the replica is
        // removed, restoring it even if the delete throws.
        final TriggerManager triggerManager = mTriggerManager;
        triggerManager.locallyDisableDelete();
        try {
            return replica.tryDelete();
        } finally {
            triggerManager.locallyEnableDelete();
        }
    }
}
public class ByteAmount { /** * Divides by factor , rounding any remainder . For example 10 bytes / 6 = 1.66 becomes 2 . Use * caution when dividing and be aware that because of precision lose due to round - off , dividing by * X and multiplying back by X might not return the initial value . * @ param factor value to divide by * @ return a new ByteValue of this ByteValue divided by factor */ @ Override public ByteAmount divide ( int factor ) { } }
checkArgument ( factor != 0 , String . format ( "Can not divide %s by 0" , this ) ) ; return ByteAmount . fromBytes ( Math . round ( value . doubleValue ( ) / factor ) ) ;
public class SearchFilter {

    /**
     * Combine other search filters with this one, using the specified operator.
     *
     * @param new_filters a vector of SearchFilter instances to be combined
     * @param op the logical operator to be used to combine the filters
     * @exception DBException if {@code op} is not a logical operator
     */
    public void combine(Vector new_filters, int op) throws DBException {
        // Only logical operators are allowed to combine filters.
        if ((op & LOGICAL_OPER_MASK) == 0) {
            throw new DBException();
        }
        // Build a vector of just the filter nodes from the supplied
        // SearchFilter instances.
        Vector filters = new Vector();
        for (Enumeration e = new_filters.elements(); e.hasMoreElements();) {
            SearchFilter next = (SearchFilter) e.nextElement();
            filters.addElement(next.getFilter());
        }
        // Combine the current filter with the new ones under a single node.
        m_filter = new SearchBaseNode(op, m_filter, filters);
    }
}
public class TableTreeNode { /** * Adjusts the node count of this node and all parent nodes . Called when nodes are being added / removed from the * tree . * @ param delta the change in the node count . */ private void adjustNodeCount ( final int delta ) { } }
for ( TableTreeNode parent = this ; parent != null ; parent = ( TableTreeNode ) parent . getParent ( ) ) { parent . nodeCount += delta ; }
public class HtmlEscape {

    /**
     * Perform an HTML 4 level 1 (XML-style) <strong>escape</strong> operation on a
     * <tt>char[]</tt> input.
     * <p>
     * <em>Level 1</em> means this method will only escape the five markup-significant
     * characters: <tt>&lt;</tt>, <tt>&gt;</tt>, <tt>&amp;</tt>, <tt>&quot;</tt> and
     * <tt>&#39;</tt>. It is called <em>XML-style</em> in order to link it with JSP's
     * <tt>escapeXml</tt> attribute in JSTL's <tt>&lt;c:out .../&gt;</tt> tags.
     * <p>
     * Note this method may <strong>not</strong> produce the same results as
     * {@link #escapeHtml5Xml(char[], int, int, java.io.Writer)} because it will escape
     * the apostrophe as <tt>&amp;#39;</tt>, whereas in HTML5 there is a specific NCR
     * for such character (<tt>&amp;apos;</tt>).
     * <p>
     * This method calls
     * {@link #escapeHtml(char[], int, int, java.io.Writer, HtmlEscapeType, HtmlEscapeLevel)}
     * with the following preconfigured values:
     * <ul>
     *   <li><tt>type</tt>:
     *       {@link org.unbescape.html.HtmlEscapeType#HTML4_NAMED_REFERENCES_DEFAULT_TO_DECIMAL}</li>
     *   <li><tt>level</tt>:
     *       {@link org.unbescape.html.HtmlEscapeLevel#LEVEL_1_ONLY_MARKUP_SIGNIFICANT}</li>
     * </ul>
     * <p>
     * This method is <strong>thread-safe</strong>.
     *
     * @param text the <tt>char[]</tt> to be escaped.
     * @param offset the position in <tt>text</tt> at which the escape operation should start.
     * @param len the number of characters in <tt>text</tt> that should be escaped.
     * @param writer the <tt>java.io.Writer</tt> to which the escaped result will be written.
     *        Nothing will be written at all to this writer if input is <tt>null</tt>.
     * @throws IOException if an input/output exception occurs
     */
    public static void escapeHtml4Xml(final char[] text, final int offset, final int len, final Writer writer)
            throws IOException {
        // Delegate to the generic escape with HTML4 named references and level 1
        // (markup-significant characters only).
        escapeHtml(text, offset, len, writer,
                HtmlEscapeType.HTML4_NAMED_REFERENCES_DEFAULT_TO_DECIMAL,
                HtmlEscapeLevel.LEVEL_1_ONLY_MARKUP_SIGNIFICANT);
    }
}
public class StandardMessageResponseData { /** * Init Method . */ public void init ( MessageDataParent messageDataParent , String strKey ) { } }
if ( strKey == null ) strKey = STANDARD_MESSAGE_RESPONSE ; super . init ( messageDataParent , strKey ) ;
public class AllowedComponentsProviderImpl { /** * Get all allowed components for a template ( not respecting any path constraints ) * @ param pageComponentPath Path of template ' s page component * @ return Set of component paths ( absolute resource types ) */ @ Override public @ NotNull Set < String > getAllowedComponentsForTemplate ( @ NotNull String pageComponentPath , @ NotNull ResourceResolver resolver ) { } }
Resource pageComponentResource = resolver . getResource ( pageComponentPath ) ; if ( pageComponentResource != null ) { Iterable < ParsysConfig > parSysConfigs = parsysConfigManager . getParsysConfigs ( pageComponentResource . getPath ( ) , resolver ) ; SortedSet < String > allowedChildren = new TreeSet < > ( ) ; for ( ParsysConfig parSysConfig : parSysConfigs ) { allowedChildren . addAll ( parSysConfig . getAllowedChildren ( ) ) ; } return allowedChildren ; } // fallback return ImmutableSet . of ( ) ;
public class MenuDrawer { /** * Returns the start position of the indicator . * @ return The start position of the indicator . */ private int getIndicatorStartPos ( ) { } }
switch ( getPosition ( ) ) { case TOP : return mIndicatorClipRect . left ; case RIGHT : return mIndicatorClipRect . top ; case BOTTOM : return mIndicatorClipRect . left ; default : return mIndicatorClipRect . top ; }
public class LessModuleBuilder {

    /**
     * (non-Javadoc)
     * @see com.ibm.jaggr.core.impl.modulebuilder.text.TextModuleBuilder#build(java.lang.String,
     *      com.ibm.jaggr.core.resource.IResource, javax.servlet.http.HttpServletRequest, java.util.List)
     */
    @Override
    public ModuleBuild build(String mid, IResource resource, HttpServletRequest request,
            List<ICacheKeyGenerator> inKeyGens) throws Exception {
        // Manage life span of thread locals used by this module builder
        if (isFeatureDependent) {
            threadLocalRequest.set(request);
            threadLocalDependentFeatures.set(null);
        }
        try {
            return super.build(mid, resource, request, inKeyGens);
        } finally {
            // Always clear the thread locals, even when the super build throws,
            // so per-request state does not leak onto a reused worker thread.
            if (isFeatureDependent) {
                threadLocalRequest.remove();
                threadLocalDependentFeatures.remove();
            }
        }
    }
}
public class CdnConfigurationServiceLocator {

    /**
     * For the given interface, get the stub implementation.
     * If this service has no port for the given interface,
     * then ServiceException is thrown.
     */
    public java.rmi.Remote getPort(Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
        try {
            if (com.google.api.ads.admanager.axis.v201808.CdnConfigurationServiceInterface.class.isAssignableFrom(serviceEndpointInterface)) {
                com.google.api.ads.admanager.axis.v201808.CdnConfigurationServiceSoapBindingStub _stub =
                        new com.google.api.ads.admanager.axis.v201808.CdnConfigurationServiceSoapBindingStub(
                                new java.net.URL(CdnConfigurationServiceInterfacePort_address), this);
                _stub.setPortName(getCdnConfigurationServiceInterfacePortWSDDServiceName());
                return _stub;
            }
        } catch (java.lang.Throwable t) {
            // Any failure constructing the stub (e.g. a malformed endpoint URL)
            // is reported uniformly as a ServiceException.
            throw new javax.xml.rpc.ServiceException(t);
        }
        // No known port matches the requested service endpoint interface.
        throw new javax.xml.rpc.ServiceException("There is no stub implementation for the interface: "
                + (serviceEndpointInterface == null ? "null" : serviceEndpointInterface.getName()));
    }
}
public class OjbTagsHandler { /** * Processes the template for the object cache of the current class definition . * @ param template The template * @ param attributes The attributes of the tag * @ exception XDocletException if an error occurs * @ doc . tag type = " block " */ public void forObjectCache ( String template , Properties attributes ) throws XDocletException { } }
_curObjectCacheDef = _curClassDef . getObjectCache ( ) ; if ( _curObjectCacheDef != null ) { generate ( template ) ; _curObjectCacheDef = null ; }
public class SessionResource {

    /**
     * Retrieves a resource representing the UserContext associated with the
     * AuthenticationProvider having the given identifier.
     *
     * @param authProviderIdentifier
     *     The unique identifier of the AuthenticationProvider associated with
     *     the UserContext being retrieved.
     * @return
     *     A resource representing the UserContext associated with the
     *     AuthenticationProvider having the given identifier.
     * @throws GuacamoleException
     *     If the AuthenticationProvider identifier is invalid.
     */
    @Path("data/{dataSource}")
    public UserContextResource getUserContextResource(
            @PathParam("dataSource") String authProviderIdentifier)
            throws GuacamoleException {
        // Pull the UserContext for the given auth provider identifier and
        // expose it as a REST resource.
        return userContextResourceFactory.create(
                session.getUserContext(authProviderIdentifier));
    }
}
public class ErrorUnmarshallingHandler {

    /**
     * An exception was propagated from further up the pipeline (probably an IO
     * exception of some sort). Notify the handler and kill the connection.
     */
    @Override
    public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) throws Exception {
        // Only notify the response handler once; the flag is set before the
        // callback so a re-entrant exception cannot trigger a second notification.
        if (!notifiedOnFailure) {
            notifiedOnFailure = true;
            try {
                responseHandler.onFailure(new SdkClientException("Unable to execute HTTP request.", cause));
            } finally {
                // Close the channel even if the failure callback itself throws.
                ctx.channel().close();
            }
        }
    }
}
public class AbstractMemberWriter {

    /**
     * Add the modifier for the member.
     *
     * @param member the member to add the modifiers for
     * @param code the content tree to which the modifiers will be added
     */
    private void addModifier(ProgramElementDoc member, Content code) {
        // Access modifier first; public is implied and therefore omitted.
        if (member.isProtected()) {
            code.addContent("protected ");
        } else if (member.isPrivate()) {
            code.addContent("private ");
        } else if (!member.isPublic()) { // Package private
            code.addContent(configuration.getText("doclet.Package_private"));
            code.addContent(" ");
        }
        if (member.isMethod()) {
            // abstract is only shown for class methods (interface methods are
            // implicitly abstract).
            if (!(member.containingClass().isInterface()) && ((MethodDoc) member).isAbstract()) {
                code.addContent("abstract ");
            }
            // This check for isDefault() and the default modifier needs to be
            // added for it to appear on the "Modifier and Type" column in the
            // method summary section. Once the default modifier is added
            // to the Modifier list on DocEnv and once it is updated to use the
            // javax.lang.model.element.Modifier, we will need to remove this.
            if (((MethodDoc) member).isDefault()) {
                code.addContent("default ");
            }
        }
        if (member.isStatic()) {
            code.addContent("static ");
        }
    }
}
public class AppServiceEnvironmentsInner {

    /**
     * Get the used, available, and total worker capacity of an App Service Environment.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;StampCapacityInner&gt; object
     */
    public Observable<ServiceResponse<Page<StampCapacityInner>>> listCapacitiesNextWithServiceResponseAsync(final String nextPageLink) {
        return listCapacitiesNextSinglePageAsync(nextPageLink)
                .concatMap(new Func1<ServiceResponse<Page<StampCapacityInner>>, Observable<ServiceResponse<Page<StampCapacityInner>>>>() {
                    @Override
                    public Observable<ServiceResponse<Page<StampCapacityInner>>> call(ServiceResponse<Page<StampCapacityInner>> page) {
                        String nextPageLink = page.body().nextPageLink();
                        // Last page: stop the pagination chain here.
                        if (nextPageLink == null) {
                            return Observable.just(page);
                        }
                        // Emit this page, then recursively fetch and append the
                        // remaining pages.
                        return Observable.just(page).concatWith(listCapacitiesNextWithServiceResponseAsync(nextPageLink));
                    }
                });
    }
}
public class BillingGroupMetadataMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param billingGroupMetadata the object to marshall; must not be null
     * @param protocolMarshaller the marshaller the fields are written to
     */
    public void marshall(BillingGroupMetadata billingGroupMetadata, ProtocolMarshaller protocolMarshaller) {
        if (billingGroupMetadata == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(billingGroupMetadata.getCreationDate(), CREATIONDATE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class NDArrayMath {

    /**
     * The number of matrices in each slice of an ndarray.
     *
     * @param arr the array to get the number of matrices per slice for
     * @return the number of matrices per slice
     */
    public static long matricesPerSlice(INDArray arr) {
        if (arr.rank() == 3) {
            // A rank-3 array has exactly one matrix per slice.
            return 1;
        } else if (arr.rank() > 3) {
            // For rank > 3, multiply the sizes of all dimensions strictly
            // between the slice dimension (0) and the trailing two matrix
            // dimensions.
            int ret = 1;
            for (int i = 1; i < arr.rank() - 2; i++) {
                ret *= arr.size(i);
            }
            return ret;
        }
        // rank <= 2 fallback: returns the second-to-last dimension size.
        // NOTE(review): for a rank-2 array this is the row count rather than a
        // matrix count — confirm this is the intended semantics for low ranks.
        return arr.size(-2);
    }
}
public class Ifc4PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcText() {
        // Lazily resolve and cache the classifier from the registered package
        // metadata (index 874 in the generated classifier list).
        if (ifcTextEClass == null) {
            ifcTextEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(874);
        }
        return ifcTextEClass;
    }
}
public class ResourceTracker { /** * Go through all the requested resources and find what needs to be released . * @ return */ public List < ResourceRequest > getResourcesToRelease ( ) { } }
List < ResourceRequest > release = new ArrayList < ResourceRequest > ( ) ; synchronized ( lockObject ) { for ( Integer requestId : setDifference ( requestedResources . keySet ( ) , requestMap . keySet ( ) ) ) { // We update the data structures right away . This assumes that the // caller will be able to release the resources . ResourceRequest req = requestedResources . remove ( requestId ) ; if ( req != null ) { release . add ( req ) ; LOG . info ( "Filing release for requestId: " + req . getId ( ) ) ; } } } return release ;
public class ModelSerializer {

    /**
     * Load a MultiLayerNetwork model from a file.
     *
     * @param path path to the model file to load the network from
     * @return the loaded network
     * @throws IOException if the file cannot be read
     */
    public static MultiLayerNetwork restoreMultiLayerNetwork(@NonNull String path) throws IOException {
        // Delegates to the File overload; the boolean flag presumably requests
        // loading the updater state as well — confirm against that overload.
        return restoreMultiLayerNetwork(new File(path), true);
    }
}
public class StringUtils { /** * Joins a collection of values into a delimited list . * @ param collection the collection of values * @ param delimiter the delimiter ( e . g . " , " ) * @ param sb the string builder to append onto * @ param < T > the value class */ public static < T > void join ( Collection < T > collection , String delimiter , StringBuilder sb ) { } }
join ( collection , delimiter , sb , new JoinCallback < T > ( ) { public void handle ( StringBuilder sb , T value ) { sb . append ( value ) ; } } ) ;
public class AbstractBuiltInGrammar {

    /**
     * A leading rule, for performance reasons, is added to the tail.
     */
    public void addProduction(Event event, Grammar grammar) {
        containers.add(new SchemaLessProduction(this, grammar, event, getNumberOfEvents()));
        // pre-calculate count for log2 (Note: always 2nd level productions available)
        // Note: BuiltInDocContent and BuiltInFragmentContent do not use this variable
        this.ec1Length = MethodsBag.getCodingLength(containers.size() + 1);
    }
}
public class HeapCache {

    /**
     * Remove the object from the cache, but only if the entry currently holds
     * a value equal to the one supplied.
     *
     * @param key the key of the entry to remove
     * @param _value the value the entry must equal for the removal to proceed
     * @return true if the entry was removed
     */
    @Override
    public boolean removeIfEquals(K key, V _value) {
        Entry e = lookupEntry(key);
        if (e == null) {
            metrics.peekMiss();
            return false;
        }
        synchronized (e) {
            // Wait for any in-flight operation on this entry before inspecting it.
            e.waitForProcessing();
            // The entry may have been removed while we waited for the lock.
            if (e.isGone()) {
                metrics.peekMiss();
                return false;
            }
            boolean f = e.hasFreshData(clock);
            if (f) {
                // Fresh entry whose value does not match: leave it in place.
                if (!e.equalsValue(_value)) {
                    return false;
                }
            } else {
                metrics.peekHitNotFresh();
                return false;
            }
            removeEntry(e);
            return f;
        }
    }
}
public class AbstractBeanJsonCreator { /** * Creates an implementation of { @ link AbstractBeanJsonSerializer } for the type given in * parameter * @ return the information about the created class * @ throws com . google . gwt . core . ext . UnableToCompleteException if any . * @ throws com . github . nmorel . gwtjackson . rebind . exception . UnsupportedTypeException if any . */ public final BeanJsonMapperInfo create ( ) throws UnableToCompleteException , UnsupportedTypeException { } }
final String simpleClassName = isSerializer ( ) ? mapperInfo . getSimpleSerializerClassName ( ) : mapperInfo . getSimpleDeserializerClassName ( ) ; PrintWriter printWriter = getPrintWriter ( mapperInfo . getPackageName ( ) , simpleClassName ) ; // the class already exists , no need to continue if ( printWriter == null ) { return mapperInfo ; } try { TypeSpec type = buildClass ( simpleClassName ) ; write ( mapperInfo . getPackageName ( ) , type , printWriter ) ; } finally { printWriter . close ( ) ; } return mapperInfo ;
public class CommerceAccountOrganizationRelUtil {

    /**
     * Returns an ordered range of all the commerce account organization rels.
     * <p>
     * Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are not primary keys; they
     * are indexes in the result set, so <code>0</code> refers to the first result.
     * Setting both <code>start</code> and <code>end</code> to
     * {@link QueryUtil#ALL_POS} will return the full result set. If
     * <code>orderByComparator</code> is specified, the query includes the given
     * ORDER BY logic. If it is absent and pagination is required, the default
     * ORDER BY logic from {@link CommerceAccountOrganizationRelModelImpl} is used.
     * If both are absent, for performance reasons the query has no ORDER BY clause
     * and the result set is sorted by primary key in ascending order.
     *
     * @param start the lower bound of the range of commerce account organization rels
     * @param end the upper bound of the range (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of commerce account organization rels
     */
    public static List<CommerceAccountOrganizationRel> findAll(int start, int end,
            OrderByComparator<CommerceAccountOrganizationRel> orderByComparator) {
        // Thin static facade over the persistence implementation.
        return getPersistence().findAll(start, end, orderByComparator);
    }
}
public class MonitorEndpointHelper { /** * Verify access to a JDBC endpoint by verifying that a specified table is accessible . * @ param muleContext * @ param muleJdbcDataSourceName * @ param tableName */ public static String pingJdbcEndpoint ( MuleContext muleContext , String muleJdbcDataSourceName , String tableName ) { } }
DataSource ds = JdbcUtil . lookupDataSource ( muleContext , muleJdbcDataSourceName ) ; Connection c = null ; Statement s = null ; ResultSet rs = null ; try { c = ds . getConnection ( ) ; s = c . createStatement ( ) ; rs = s . executeQuery ( "select 1 from " + tableName ) ; } catch ( SQLException e ) { return ERROR_PREFIX + ": The table " + tableName + " was not found in the data source " + muleJdbcDataSourceName + ", reason: " + e . getMessage ( ) ; } finally { try { if ( rs != null ) rs . close ( ) ; } catch ( SQLException e ) { } try { if ( s != null ) s . close ( ) ; } catch ( SQLException e ) { } try { if ( c != null ) c . close ( ) ; } catch ( SQLException e ) { } } return OK_PREFIX + ": The table " + tableName + " was found in the data source " + muleJdbcDataSourceName ;
public class DeclaredDependencies { /** * Get the collection of direct dependencies included in this instance . * @ param includeUnsolved include dependencies that may need further * @ param includePresolved include the dependencies that do not need further resolution . * @ return the collection of direct dependencies included in this instance . */ public Collection < ArtifactSpec > getDirectDependencies ( boolean includeUnsolved , boolean includePresolved ) { } }
Set < ArtifactSpec > deps = new LinkedHashSet < > ( ) ; if ( includeUnsolved ) { deps . addAll ( getDirectDeps ( ) ) ; } if ( includePresolved ) { deps . addAll ( presolvedDependencies . getDirectDeps ( ) ) ; } return deps ;
public class NotEmptyAfterStripValidator { /** * { @ inheritDoc } check if given string is a not empty after strip . * @ see javax . validation . ConstraintValidator # isValid ( java . lang . Object , * javax . validation . ConstraintValidatorContext ) */ @ Override public final boolean isValid ( final Object pvalue , final ConstraintValidatorContext pcontext ) { } }
final String valueAsString = StringUtils . strip ( Objects . toString ( pvalue , null ) , stripcharacters ) ; return StringUtils . isNotEmpty ( valueAsString ) ;
public class InboundResourceAdapterImpl { /** * A validate method . Don ' t extending for the moment ValidatableMetadata * @ return true if Ra is valid , false in the other cases */ public boolean validationAsBoolean ( ) { } }
if ( this . getMessageadapter ( ) == null || this . getMessageadapter ( ) . getMessagelisteners ( ) == null || this . getMessageadapter ( ) . getMessagelisteners ( ) . isEmpty ( ) ) return false ; MessageListener mlmd = this . getMessageadapter ( ) . getMessagelisteners ( ) . get ( 0 ) ; if ( mlmd . getMessagelistenerType ( ) == null || mlmd . getActivationspec ( ) == null || mlmd . getActivationspec ( ) . getActivationspecClass ( ) == null ) return false ; return true ;
public class Token {

    /**
     * Creates a token that represents a symbol, using a library for the type.
     *
     * @param type the textual form of the symbol; its numeric type is resolved
     *        via {@code Types.lookupSymbol}
     * @param startLine the line at which the token starts
     * @param startColumn the column at which the token starts
     */
    public static Token newSymbol(String type, int startLine, int startColumn) {
        return new Token(Types.lookupSymbol(type), type, startLine, startColumn);
    }
}
public class SubWriterHolderWriter {

    /**
     * Get the summary table.
     *
     * @param mw the writer for the member being documented
     * @param typeElement the type element to be documented
     * @param tableContents list of summary table contents
     * @param showTabs true if the table needs to show tabs
     * @return the content tree for the summary table
     */
    public Content getSummaryTableTree(AbstractMemberWriter mw, TypeElement typeElement,
            List<Content> tableContents, boolean showTabs) {
        Content caption;
        if (showTabs) {
            // Tabbed tables need a per-method-type caption plus the script
            // that drives the tab switching.
            caption = getTableCaption(mw.methodTypes);
            generateTableTabTypesScript(mw.typeMap, mw.methodTypes, "methods");
        } else {
            caption = getTableCaption(mw.getCaption());
        }
        // HTML5 output omits the summary attribute; HTML4 includes it.
        Content table = (configuration.isOutputHtml5())
                ? HtmlTree.TABLE(HtmlStyle.memberSummary, caption)
                : HtmlTree.TABLE(HtmlStyle.memberSummary, mw.getTableSummary(), caption);
        table.addContent(getSummaryTableHeader(mw.getSummaryTableHeader(typeElement), "col"));
        for (Content tableContent : tableContents) {
            table.addContent(tableContent);
        }
        return table;
    }
}
public class BotmReflectionUtil {

    /**
     * Get the public method. <br>
     * It searches flexibly, so you can specify sub-class types in argTypes. <br>
     * If overloaded methods exist, it returns the first-found method. <br>
     * No caching is done, so cache the result yourself if you call this several times. <br>
     *
     * @param clazz The type of class that defines the method. (NotNull)
     * @param methodName The name of method. (NotNull)
     * @param argTypes The types of arguments. (NotNull)
     * @return The instance of method. (NullAllowed: if null, not found)
     */
    public static Method getPublicMethodFlexibly(Class<?> clazz, String methodName, Class<?>[] argTypes) {
        assertObjectNotNull("clazz", clazz);
        assertStringNotNullAndNotTrimmedEmpty("methodName", methodName);
        // Last argument enables the flexible (sub-class tolerant) search.
        return findMethod(clazz, methodName, argTypes, VisibilityType.PUBLIC, true);
    }
}
public class LdapUtils {

    /**
     * Execute a password modify operation.
     *
     * @param currentDn the current dn
     * @param connectionFactory the connection factory
     * @param oldPassword the old password
     * @param newPassword the new password
     * @param type the LDAP server type (AD gets special handling)
     * @return true/false depending on whether the operation succeeded
     */
    public static boolean executePasswordModifyOperation(final String currentDn,
            final ConnectionFactory connectionFactory, final String oldPassword,
            final String newPassword, final AbstractLdapProperties.LdapType type) {
        try (val modifyConnection = createConnection(connectionFactory)) {
            // Warn (but proceed) when the connection is neither SSL nor StartTLS.
            if (!modifyConnection.getConnectionConfig().getUseSSL()
                    && !modifyConnection.getConnectionConfig().getUseStartTLS()) {
                LOGGER.warn("Executing password modification op under a non-secure LDAP connection; "
                        + "To modify password attributes, the connection to the LDAP server SHOULD be secured and/or encrypted.");
            }
            if (type == AbstractLdapProperties.LdapType.AD) {
                // Active Directory requires replacing the unicodePwd attribute
                // rather than using the password-modify extended operation.
                LOGGER.debug("Executing password modification op for active directory based on "
                        + "[https://support.microsoft.com/en-us/kb/269190]");
                val operation = new ModifyOperation(modifyConnection);
                val response = operation.execute(new ModifyRequest(currentDn,
                        new AttributeModification(AttributeModificationType.REPLACE,
                                new UnicodePwdAttribute(newPassword))));
                LOGGER.debug("Result code [{}], message: [{}]", response.getResult(), response.getMessage());
                return response.getResultCode() == ResultCode.SUCCESS;
            }
            // Generic LDAP: password-modify extended operation; the old
            // password is only supplied when non-blank.
            LOGGER.debug("Executing password modification op for generic LDAP");
            val operation = new PasswordModifyOperation(modifyConnection);
            val response = operation.execute(new PasswordModifyRequest(currentDn,
                    StringUtils.isNotBlank(oldPassword) ? new Credential(oldPassword) : null,
                    new Credential(newPassword)));
            LOGGER.debug("Result code [{}], message: [{}]", response.getResult(), response.getMessage());
            return response.getResultCode() == ResultCode.SUCCESS;
        } catch (final LdapException e) {
            LOGGER.error(e.getMessage(), e);
        }
        return false;
    }
}
public class XMLDocument {

    /**
     * Create a document from a XMLStreamEvents.
     *
     * @param stream the event stream to consume; read until end of input
     * @return the populated document
     * @throws XMLException on duplicate DOCTYPE or multiple root elements
     * @throws IOException if reading the stream fails (EOF ends the loop normally)
     */
    public static XMLDocument create(XMLStreamEventsSync stream) throws XMLException, IOException {
        XMLDocument doc = new XMLDocument();
        do {
            switch (stream.event.type) {
            case DOCTYPE:
                // Only a single DOCTYPE declaration is permitted.
                if (doc.docType != null)
                    throw new XMLException(stream.getPosition(), "Unexpected element ", "DOCTYPE");
                doc.docType = new XMLDocumentType(doc, stream.event.text.asString(),
                        stream.event.publicId != null ? stream.event.publicId.asString() : null,
                        stream.event.system != null ? stream.event.system.asString() : null);
                break;
            case START_ELEMENT:
                // Only a single root element is permitted.
                if (doc.root != null)
                    throw new XMLException(stream.getPosition(), "Unexpected element ", stream.event.text.asString());
                doc.root = XMLElement.create(doc, stream);
                break;
            default:
                // Other events (comments, whitespace, ...) are ignored here.
                break;
            }
            try {
                stream.next();
            } catch (EOFException e) {
                // End of input terminates the loop normally.
                break;
            }
        } while (true);
        return doc;
    }
}
public class AfplibFactoryImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public FontResolutionRPuBase createFontResolutionRPuBaseFromString(EDataType eDataType, String initialValue) {
        // Resolve the enumerator from its literal; unknown literals are rejected.
        FontResolutionRPuBase result = FontResolutionRPuBase.get(initialValue);
        if (result == null)
            throw new IllegalArgumentException(
                    "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class WSJdbcResultSet {

    /**
     * Perform any wrapper-specific close logic. This method is called by the default
     * WSJdbcObject close method.
     *
     * @param closeWrapperOnly boolean flag to indicate that only wrapper-closure activities
     *        should be performed, but close of the underlying object is unnecessary.
     * @return SQLException the first error to occur while closing the object.
     */
    protected SQLException closeWrapper(boolean closeWrapperOnly) {
        // Indicate the result is closed by setting the parent object's result set to null.
        // This will allow us to be garbage collected.
        // - Since we use childWrapper for the first result set,
        //   we first compare the childWrapper object.
        if (parentWrapper.childWrapper == this) {
            parentWrapper.childWrapper = null;
        } else {
            // This result set must be stored in childWrappers.
            parentWrapper.childWrappers.remove(this);
        }
        if (closeWrapperOnly) {
            // skip close of implementation object. (Statement.getMoreResults will do it.)
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(this, tc, "closeWrapper skipping close of ResultSet implementation object");
            }
        } else {
            try // Close the JDBC driver ResultSet implementation object.
            {
                rsetImpl.close();
            } catch (SQLException closeX) {
                FFDCFilter.processException(closeX,
                        "com.ibm.ws.rsadapter.jdbc.WSJdbcResultSet.closeWrapper", "246", this);
                rsetImpl = null;
                return WSJdbcUtil.mapException(this, closeX);
            }
        }
        // Check if the parent object is a WSJdbcStatement, and closeOnCompletion is enabled
        if (parentWrapper != null && parentWrapper instanceof WSJdbcStatement
                && ((WSJdbcStatement) parentWrapper).closeOnCompletion) {
            WSJdbcStatement parentStmt = (WSJdbcStatement) parentWrapper;
            // If the parent Statement has no more child objects, close the Statement
            if (parentStmt.childWrapper == null
                    && (parentStmt.childWrappers == null || parentStmt.childWrappers.isEmpty())) {
                try {
                    parentStmt.close();
                } catch (SQLException closeX) {
                    FFDCFilter.processException(closeX,
                            "com.ibm.ws.rsadapter.jdbc.WSJdbcResultSet.closeWrapper", "281", this);
                    rsetImpl = null;
                    return WSJdbcUtil.mapException(this, closeX);
                }
            }
        }
        // Success: drop the implementation reference and report no error.
        rsetImpl = null;
        return null;
    }
}
public class DecimalStyle { /** * Returns a copy of the info with a new character that represents the decimal point . * The character used to represent a decimal point may vary by culture . * This method specifies the character to use . * @ param decimalSeparator the character for the decimal point * @ return a copy with a new character that represents the decimal point , not null */ public DecimalStyle withDecimalSeparator ( char decimalSeparator ) { } }
if ( decimalSeparator == this . decimalSeparator ) { return this ; } return new DecimalStyle ( zeroDigit , positiveSign , negativeSign , decimalSeparator ) ;
public class CreateRateBasedRuleRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param createRateBasedRuleRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller the fields are written to
     */
    public void marshall(CreateRateBasedRuleRequest createRateBasedRuleRequest, ProtocolMarshaller protocolMarshaller) {
        if (createRateBasedRuleRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createRateBasedRuleRequest.getName(), NAME_BINDING);
            protocolMarshaller.marshall(createRateBasedRuleRequest.getMetricName(), METRICNAME_BINDING);
            protocolMarshaller.marshall(createRateBasedRuleRequest.getRateKey(), RATEKEY_BINDING);
            protocolMarshaller.marshall(createRateBasedRuleRequest.getRateLimit(), RATELIMIT_BINDING);
            protocolMarshaller.marshall(createRateBasedRuleRequest.getChangeToken(), CHANGETOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ConcurrentConveyor {

    /**
     * Drains no more than {@code limit} items from the queue at the supplied
     * index into the supplied collection.
     *
     * @param queueIndex index into this conveyor's internal queue array
     * @param drain the collection that receives the drained items
     * @param limit the maximum number of items to drain
     * @return the number of items drained
     */
    public final int drainTo(int queueIndex, Collection<? super E> drain, int limit) {
        // Delegates to the queue-level drain overload for the selected queue.
        return drain(queues[queueIndex], drain, limit);
    }
}
public class CmsXmlContentProperty {

    /**
     * Merges this object with another one containing default values.<p>
     *
     * This method does not modify this object or the object passed as a parameter.
     * The resulting object's fields will be filled with the values from the defaults
     * if they're null in this object (first non-null wins, this object taking priority).
     *
     * @param defaults the object with the defaults
     * @return the result of merging this object with the defaults
     */
    public CmsXmlContentProperty mergeDefaults(CmsXmlContentProperty defaults) {

        // Field-by-field merge: keep this object's value when set, otherwise
        // fall back to the corresponding value from 'defaults'.
        return new CmsXmlContentProperty(
            firstNotNull(m_name, defaults.m_name),
            firstNotNull(m_type, defaults.m_type),
            firstNotNull(m_visibility, defaults.m_visibility),
            firstNotNull(m_widget, defaults.m_widget),
            firstNotNull(m_widgetConfiguration, defaults.m_widgetConfiguration),
            firstNotNull(m_ruleRegex, defaults.m_ruleRegex),
            firstNotNull(m_ruleType, defaults.m_ruleType),
            firstNotNull(m_default, defaults.m_default),
            firstNotNull(m_niceName, defaults.m_niceName),
            firstNotNull(m_description, defaults.m_description),
            firstNotNull(m_error, defaults.m_error),
            firstNotNull(m_preferFolder, defaults.m_preferFolder));
    }
}
public class audit_log { /** * Use this API to fetch filtered set of audit _ log resources . * filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */ public static audit_log [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
audit_log obj = new audit_log ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; audit_log [ ] response = ( audit_log [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class JWT { /** * 基于RS256算法验证 * @ throws InvalidTokenException 如果签名的格式不正确 */ private static boolean verifySignature ( String content , String signed , RSAPublicKey pk ) throws InvalidTokenException { } }
try { byte [ ] signedData = Base64 . urlDecode ( signed ) ; byte [ ] contentData = content . getBytes ( JWT . UTF_8 ) ; Signature signature = Signature . getInstance ( ALG_SHA256WITHRSA ) ; signature . initVerify ( pk ) ; signature . update ( contentData ) ; return signature . verify ( signedData ) ; } catch ( SignatureException e ) { throw new InvalidTokenException ( "Invalid signature" , e ) ; } catch ( Exception e ) { throw new RuntimeException ( "Verify signature error" , e ) ; }
public class VMCommandLine { /** * Run a new VM with the given class path . * @ param classToLaunch is the class to launch . * @ param classpath is the class path to use . * @ param additionalParams is the list of additional parameters * @ return the process that is running the new virtual machine , neither < code > null < / code > * @ throws IOException when a IO error occurs . * @ since 6.2 */ public static Process launchVMWithClassPath ( String classToLaunch , File [ ] classpath , String ... additionalParams ) throws IOException { } }
final StringBuilder b = new StringBuilder ( ) ; for ( final File f : classpath ) { if ( b . length ( ) > 0 ) { b . append ( File . pathSeparator ) ; } b . append ( f . getAbsolutePath ( ) ) ; } return launchVMWithClassPath ( classToLaunch , b . toString ( ) , additionalParams ) ;
public class EstimateKeywordTraffic { /** * Returns the mean of the { @ code microAmount } of the two Money values if neither is null , else * returns null . */ private static Double calculateMean ( Money minMoney , Money maxMoney ) { } }
if ( minMoney == null || maxMoney == null ) { return null ; } return calculateMean ( minMoney . getMicroAmount ( ) , maxMoney . getMicroAmount ( ) ) ;
public class Reaction { /** * Sets the coefficient of the products . * @ param coefficients An array of double ' s containing the coefficients of the products * @ return true if coefficients have been set . * @ see # getProductCoefficients */ @ Override public boolean setProductCoefficients ( Double [ ] coefficients ) { } }
boolean result = products . setMultipliers ( coefficients ) ; notifyChanged ( ) ; return result ;
public class RoseScanner {

    /**
     * Returns the plain class folders to be scanned (e.g. WEB-INF/classes or
     * target/classes). The result is computed once, cached in the
     * {@code classesFolderResources} field, and returned as an unmodifiable list.
     *
     * NOTE(review): the lazy initialization here is not synchronized — concurrent
     * first calls could compute the list twice; confirm whether callers are
     * single-threaded.
     *
     * @return an unmodifiable list of the discovered classes-folder resources
     * @throws IOException if the class loader's resources cannot be enumerated
     */
    public List<ResourceRef> getClassesFolderResources() throws IOException {
        if (classesFolderResources == null) {
            if (logger.isInfoEnabled()) {
                logger.info("[classesFolder] start to found available classes folders ...");
            }
            // Local list shadows the field of the same name; the field is only
            // assigned once the full list has been computed.
            List<ResourceRef> classesFolderResources = new ArrayList<ResourceRef>();
            // Ask the class loader for every root ("" pattern) it can see.
            Enumeration<URL> founds = resourcePatternResolver.getClassLoader().getResources("");
            while (founds.hasMoreElements()) {
                URL urlObject = founds.nextElement();
                // Only plain filesystem folders are eligible; jar:/http: roots are skipped.
                if (!"file".equals(urlObject.getProtocol())) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("[classesFolder] Ignored classes folder because "
                                + "not a file protocol url: " + urlObject);
                    }
                    continue;
                }
                String path = urlObject.getPath();
                Assert.isTrue(path.endsWith("/"));
                // Disabled filter: previously only folders ending in "/classes/" or
                // "/bin/" were accepted.
                // if (!path.endsWith("/classes/") && !path.endsWith("/bin/")) {
                //     if (logger.isInfoEnabled()) {
                //         logger.info("[classesFolder] Ignored classes folder because "
                //                 + "not ends with '/classes/' or '/bin/': " + urlObject);
                //     continue;
                // }
                File file;
                try {
                    file = new File(urlObject.toURI());
                } catch (URISyntaxException e) {
                    throw new IOException(e);
                }
                // A regular file cannot be a classes folder.
                if (file.isFile()) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("[classesFolder] Ignored because not a directory: " + urlObject);
                    }
                    continue;
                }
                Resource resource = new FileSystemResource(file);
                ResourceRef resourceRef = ResourceRef.toResourceRef(resource);
                if (classesFolderResources.contains(resourceRef)) {
                    // Drop duplicate folder addresses.
                    if (logger.isDebugEnabled()) {
                        logger.debug("[classesFolder] remove replicated classes folder: " + resourceRef);
                    }
                } else {
                    classesFolderResources.add(resourceRef);
                    if (logger.isDebugEnabled()) {
                        logger.debug("[classesFolder] add classes folder: " + resourceRef);
                    }
                }
            }
            // Remove entries where one folder address contains another
            // (keep only the more specific one).
            Collections.sort(classesFolderResources);
            List<ResourceRef> toRemove = new LinkedList<ResourceRef>();
            for (int i = 0; i < classesFolderResources.size(); i++) {
                ResourceRef ref = classesFolderResources.get(i);
                String refURI = ref.getResource().getURI().toString();
                for (int j = i + 1; j < classesFolderResources.size(); j++) {
                    ResourceRef refj = classesFolderResources.get(j);
                    String refjURI = refj.getResource().getURI().toString();
                    if (refURI.startsWith(refjURI)) {
                        // refj is a prefix (wrapper) of ref: discard the wrapper.
                        toRemove.add(refj);
                        if (logger.isInfoEnabled()) {
                            logger.info("[classesFolder] remove wrapper classes folder: " + refj);
                        }
                    } else if (refjURI.startsWith(refURI) && refURI.length() != refjURI.length()) {
                        // ref is a strict prefix (wrapper) of refj: discard ref.
                        toRemove.add(ref);
                        if (logger.isInfoEnabled()) {
                            logger.info("[classesFolder] remove wrapper classes folder: " + ref);
                        }
                    }
                }
            }
            classesFolderResources.removeAll(toRemove);
            // Publish the computed list to the cache field.
            this.classesFolderResources = new ArrayList<ResourceRef>(classesFolderResources);
            if (logger.isInfoEnabled()) {
                logger.info("[classesFolder] found " + classesFolderResources.size()
                        + " classes folders: " + classesFolderResources);
            }
        } else {
            if (logger.isInfoEnabled()) {
                logger.info("[classesFolder] found cached " + classesFolderResources.size()
                        + " classes folders: " + classesFolderResources);
            }
        }
        // Here 'classesFolderResources' resolves to the cached field.
        return Collections.unmodifiableList(classesFolderResources);
    }
}
public class Authentication {

    /**
     * Execute a login attempt, given a user name and password. If a user has already logged in,
     * a logout will be called first.
     *
     * @param userId
     *            The unique user ID.
     * @param password
     *            The user's password.
     * @param callback
     *            A possible callback to be executed when the login has been done (successfully
     *            or not). Can be null.
     */
    public void login(final String userId, final String password, final BooleanCallback callback) {
        if (this.userId == null) {
            // Nobody logged in yet: go straight to the login.
            loginUser(userId, password, callback);
        } else if (this.userId.equals(userId)) {
            // Already logged in as this user: nothing to do.
            // NOTE(review): the callback is never invoked on this path — confirm
            // callers do not rely on it firing.
            return;
        } else {
            // A different user is logged in: log out first, then log in the new
            // user once the logout command succeeds.
            GwtCommand command = new GwtCommand(logoutCommandName);
            GwtCommandDispatcher.getInstance().execute(command,
                    new AbstractCommandCallback<SuccessCommandResponse>() {

                public void execute(SuccessCommandResponse response) {
                    if (response.isSuccess()) {
                        // Clear the old session state before re-authenticating.
                        userToken = null;
                        Authentication.this.userId = null;
                        manager.fireEvent(new LogoutSuccessEvent());
                        loginUser(userId, password, callback);
                    } else {
                        // Logout failed: the new login is not attempted.
                        manager.fireEvent(new LogoutFailureEvent());
                    }
                }
            });
        }
    }
}
public class UriUtils { /** * Creates a new URL string from the specified base URL and parameters * encoded in non - form encoding , www - urlencoded . * @ param baseUrl The base URL excluding parameters . * @ param params The parameters . * @ return The full URL string . */ public static String newWwwUrlEncodedUrl ( final String baseUrl , final Collection < Pair < String , String > > params ) { } }
final StringBuilder sb = new StringBuilder ( ) ; sb . append ( baseUrl ) ; sb . append ( getUrlParameters ( params , false ) ) ; return sb . toString ( ) ;
public class SofaResteasyClientBuilder { /** * 注册jaxrs Provider * @ return SofaResteasyClientBuilder */ public SofaResteasyClientBuilder registerProvider ( ) { } }
ResteasyProviderFactory providerFactory = getProviderFactory ( ) ; // 注册内置 Set < Class > internalProviderClasses = JAXRSProviderManager . getInternalProviderClasses ( ) ; if ( CommonUtils . isNotEmpty ( internalProviderClasses ) ) { for ( Class providerClass : internalProviderClasses ) { providerFactory . register ( providerClass ) ; } } // 注册自定义 Set < Object > customProviderInstances = JAXRSProviderManager . getCustomProviderInstances ( ) ; if ( CommonUtils . isNotEmpty ( customProviderInstances ) ) { for ( Object provider : customProviderInstances ) { PropertyInjector propertyInjector = providerFactory . getInjectorFactory ( ) . createPropertyInjector ( JAXRSProviderManager . getTargetClass ( provider ) , providerFactory ) ; propertyInjector . inject ( provider ) ; providerFactory . registerProviderInstance ( provider ) ; } } return this ;
public class EigenPowerMethod_DDRM { /** * Test for convergence by seeing if the element with the largest change * is smaller than the tolerance . In some test cases it alternated between * the + and - values of the eigen vector . When this happens it seems to have " converged " * to a non - dominant eigen vector . At least in the case I looked at . I haven ' t devoted * a lot of time into this issue . . . */ private boolean checkConverged ( DMatrixRMaj A ) { } }
double worst = 0 ; double worst2 = 0 ; for ( int j = 0 ; j < A . numRows ; j ++ ) { double val = Math . abs ( q2 . data [ j ] - q0 . data [ j ] ) ; if ( val > worst ) worst = val ; val = Math . abs ( q2 . data [ j ] + q0 . data [ j ] ) ; if ( val > worst2 ) worst2 = val ; } // swap vectors DMatrixRMaj temp = q0 ; q0 = q2 ; q2 = temp ; if ( worst < tol ) return true ; else if ( worst2 < tol ) return true ; else return false ;
public class ColumnMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Column column , ProtocolMarshaller protocolMarshaller ) { } }
if ( column == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( column . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( column . getType ( ) , TYPE_BINDING ) ; protocolMarshaller . marshall ( column . getComment ( ) , COMMENT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JvmFormalParameterImpl {

    /**
     * Sets the containment reference {@code parameterType}, maintaining EMF inverse
     * references and firing change notifications.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setParameterType(JvmTypeReference newParameterType) {
        if (newParameterType != parameterType) {
            // Accumulate inverse-reference removals/additions into one chain so a
            // single batch of notifications is dispatched at the end.
            NotificationChain msgs = null;
            if (parameterType != null)
                msgs = ((InternalEObject) parameterType).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - TypesPackage.JVM_FORMAL_PARAMETER__PARAMETER_TYPE, null, msgs);
            if (newParameterType != null)
                msgs = ((InternalEObject) newParameterType).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - TypesPackage.JVM_FORMAL_PARAMETER__PARAMETER_TYPE, null, msgs);
            // Perform the actual assignment; this may append its own notification.
            msgs = basicSetParameterType(newParameterType, msgs);
            if (msgs != null)
                msgs.dispatch();
        }
        // Same value re-set: still emit a SET notification if anyone is listening
        // (standard EMF "touch" behavior).
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET,
                    TypesPackage.JVM_FORMAL_PARAMETER__PARAMETER_TYPE, newParameterType, newParameterType));
    }
}
public class GetCampaignsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetCampaignsRequest getCampaignsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getCampaignsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getCampaignsRequest . getApplicationId ( ) , APPLICATIONID_BINDING ) ; protocolMarshaller . marshall ( getCampaignsRequest . getPageSize ( ) , PAGESIZE_BINDING ) ; protocolMarshaller . marshall ( getCampaignsRequest . getToken ( ) , TOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AmazonApiGatewayV2Client { /** * Gets an Integration . * @ param getIntegrationRequest * @ return Result of the GetIntegration operation returned by the service . * @ throws NotFoundException * The resource specified in the request was not found . * @ throws TooManyRequestsException * The client is sending more than the allowed number of requests per unit of time . * @ sample AmazonApiGatewayV2 . GetIntegration */ @ Override public GetIntegrationResult getIntegration ( GetIntegrationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetIntegration ( request ) ;
public class CloseableIterables { /** * Returns the elements of { @ code unfiltered } that satisfy a predicate . The * resulting closeable iterable ' s iterator does not support { @ code remove ( ) } . */ public static < T > CloseableIterable < T > filter ( final CloseableIterable < T > iterable , final Predicate < ? super T > filter ) { } }
return wrap ( Iterables . filter ( iterable , filter :: test ) , iterable ) ;
public class BaseAbilityBot { /** * Invokes the method and retrieves its return { @ link Reply } . * @ param obj an bot or extension that this method is invoked with * @ return a { @ link Function } which returns the { @ link Reply } returned by the given method */ private Function < ? super Method , AbilityExtension > returnExtension ( Object obj ) { } }
return method -> { try { return ( AbilityExtension ) method . invoke ( obj ) ; } catch ( IllegalAccessException | InvocationTargetException e ) { BotLogger . error ( "Could not add ability extension" , TAG , e ) ; throw propagate ( e ) ; } } ;
public class MiniParser {

    /**
     * Splits {@code input} into at most {@code maxSegments} segments, honoring the
     * parser's escape/quote state ({@code isEscaped()}, and the {@code escaped}/
     * {@code quoted} fields, which are reset when this method returns).
     *
     * Separation happens either on runs of whitespace (when
     * {@code splitOnWhitespace} is true) or on a separator character: any character
     * in {@code separatorSet} when that set is non-null, otherwise the single
     * {@code separator} character.
     *
     * @param input the string to split; may be null
     * @param splitOnWhitespace whether whitespace runs act as separators
     * @param separator the separator character, used when {@code separatorSet} is null
     * @param separatorSet optional set of separator characters (as a string)
     * @param maxSegments maximum number of segments; once the last segment is
     *        reached, separators are no longer honored
     * @return the list of segments, or null when {@code input} is null
     */
    private List<String> splitInternal(final String input, final boolean splitOnWhitespace, final char separator, final String separatorSet, final int maxSegments) {
        if (input == null) {
            return null;
        }
        try {
            final List<String> segments = new ArrayList<String>();
            StringBuilder buffer = new StringBuilder();
            for (int index = 0; index < input.length(); index++) {
                final char c = input.charAt(index);
                boolean separatedByWhitespace = false;
                if (splitOnWhitespace) {
                    // Consume an entire run of whitespace as one separator.
                    for (; index < input.length() && Character.isWhitespace(input.charAt(index)); index++) {
                        separatedByWhitespace = true;
                    }
                    if (separatedByWhitespace) {
                        // Step back so the for-loop increment lands on the first
                        // non-whitespace character.
                        index--;
                    }
                }
                final boolean separates = separatedByWhitespace
                        || (separatorSet != null ? separatorSet.indexOf(c) != - 1 : c == separator);
                // In case we are not already in the last segment and there is an
                // unescaped, unquoted separator, this segment is now done.
                if (segments.size() != maxSegments - 1 && separates && !isEscaped()) {
                    finish(segments, buffer);
                    buffer = new StringBuilder();
                } else {
                    // Not a separator here: append() also updates escape/quote state.
                    append(buffer, c);
                }
            }
            // Flush the trailing segment (always, unless whitespace-splitting left
            // an empty buffer).
            if (!splitOnWhitespace || buffer.length() != 0) {
                finish(segments, buffer);
            }
            return segments;
        } finally {
            // Reset parser state so a leftover escape/quote cannot leak into the
            // next invocation.
            escaped = false;
            quoted = false;
        }
    }
}
public class MACNumber { /** * Try to determine the primary ethernet address of the machine and * replies the associated internet addresses . * @ return the internet addresses of the primary network interface . */ @ Pure public static Collection < InetAddress > getPrimaryAdapterAddresses ( ) { } }
final Enumeration < NetworkInterface > interfaces ; try { interfaces = NetworkInterface . getNetworkInterfaces ( ) ; } catch ( SocketException exception ) { return Collections . emptyList ( ) ; } if ( interfaces != null ) { NetworkInterface inter ; while ( interfaces . hasMoreElements ( ) ) { inter = interfaces . nextElement ( ) ; try { final byte [ ] addr = inter . getHardwareAddress ( ) ; if ( addr != null ) { final Collection < InetAddress > inetList = new ArrayList < > ( ) ; final Enumeration < InetAddress > inets = inter . getInetAddresses ( ) ; while ( inets . hasMoreElements ( ) ) { inetList . add ( inets . nextElement ( ) ) ; } return inetList ; } } catch ( SocketException exception ) { // Continue to the next loop . } } } return Collections . emptyList ( ) ;
public class HamtPMap {

    /**
     * Compare two unsigned integers.
     *
     * JDK 7 has no built-in for this (JDK 8 adds {@code Integer.compareUnsigned}),
     * so it is emulated here: the top 30 bits are compared first (shifting keeps
     * the subtraction free of overflow), and only if they tie are the low 2 bits
     * compared.
     *
     * @return negative, zero, or positive as {@code left} is unsigned-less-than,
     *         equal to, or unsigned-greater-than {@code right}
     */
    private static int compareUnsigned(int left, int right) {
        final int highBitsDelta = (left >>> 2) - (right >>> 2);
        if (highBitsDelta != 0) {
            return highBitsDelta;
        }
        // High 30 bits equal: the low 2 bits decide, and their difference is tiny.
        return (left & 3) - (right & 3);
    }
}
public class AddressDivision {

    /**
     * Checks whether applying the mask to every value in the range
     * [value, upperValue] yields a contiguous range (i.e. no values are skipped).
     *
     * For instance, masking the range 0000-1010 with 0111 gives 0000-0010, yet the
     * original value 0111 masks to 0111, which lies outside that result — so the
     * mask is incompatible with that range.
     *
     * (When divisionPrefixLen is null, isAutoSubnets has no effect.)
     *
     * @param value the lower bound of the range
     * @param upperValue the upper bound of the range
     * @param maskValue the mask to apply
     * @param maxValue the all-ones value for this division's bit width
     * @return true if the masked range is contiguous
     */
    protected static boolean isMaskCompatibleWithRange(long value, long upperValue, long maskValue, long maxValue) {
        // Trivial cases: single value, identity mask, or zero mask.
        if (value == upperValue || maskValue == maxValue || maskValue == 0) {
            return true;
        }
        // Bits that change anywhere across the range.
        long rangeBits = value ^ upperValue;
        // If only the lowest bit varies (or none), any mask keeps the range contiguous.
        if (rangeBits != 0 && rangeBits != 1) {
            int topRangeBit = Long.numberOfLeadingZeros(rangeBits);
            // Restrict the mask to the bits at or below the highest varying bit.
            long belowTop = ~0L >>> topRangeBit;
            long maskedRangeBits = maskValue & belowTop;
            if (maskedRangeBits != 0 && maskedRangeBits != 1) {
                int topMaskedBit = Long.numberOfLeadingZeros(maskedRangeBits);
                // Every mask bit below the highest surviving bit must be 1,
                // otherwise masked values interleave and leave gaps.
                long onesBelow = ~0L >>> (topMaskedBit + 1);
                if ((maskValue & onesBelow) != onesBelow) {
                    return false;
                }
                if (topMaskedBit > topRangeBit) {
                    // A 1 bit of the range was masked out: the unmasked bits of the
                    // upper bound below that point must all be 1, or values in the
                    // masked range are missed (see the 0000-1010 / 0111 example).
                    long upperOnes = ~0L >>> topMaskedBit;
                    if ((upperValue & upperOnes) != upperOnes) {
                        return false;
                    }
                }
            }
        }
        return true;
    }
}
public class UIComponent { /** * < p class = " changed _ added _ 2_1 " > For components that need to support * the concept of transient state , this method will restore any * state saved on a prior call to { @ link # saveTransientState } . < / p > * @ since 2.1 */ public void restoreTransientState ( FacesContext context , Object state ) { } }
boolean forceCreate = ( state != null ) ; TransientStateHelper helper = getTransientStateHelper ( forceCreate ) ; if ( helper != null ) { helper . restoreTransientState ( context , state ) ; }
public class CollectorSampler { /** * Returns true if spans with this trace ID should be recorded to storage . * < p > Zipkin v1 allows storage - layer sampling , which can help prevent spikes in traffic from * overloading the system . Debug spans are always stored . * < p > This uses only the lower 64 bits of the trace ID as instrumentation still send mixed trace * ID width . * @ param hexTraceId the lower 64 bits of the span ' s trace ID are checked against the boundary * @ param debug when true , always passes sampling */ public boolean isSampled ( String hexTraceId , boolean debug ) { } }
if ( Boolean . TRUE . equals ( debug ) ) return true ; long traceId = HexCodec . lowerHexToUnsignedLong ( hexTraceId ) ; // The absolute value of Long . MIN _ VALUE is larger than a long , so Math . abs returns identity . // This converts to MAX _ VALUE to avoid always dropping when traceId = = Long . MIN _ VALUE long t = traceId == Long . MIN_VALUE ? Long . MAX_VALUE : Math . abs ( traceId ) ; return t <= boundary ( ) ;
public class DefaultTerminalImpl {

    /**
     * Method used to construct a value for the given tag and length: builds the
     * terminal-side data element for a PDOL/DOL entry, right-aligned and
     * zero-padded (or truncated) to exactly the requested length.
     *
     * @param pTagAndLength
     *            tag and length value
     * @return tag value in byte
     */
    @Override
    public byte[] constructValue(final TagAndLength pTagAndLength) {
        // Result buffer is pre-sized to the requested length; unset bytes stay 0.
        byte ret[] = new byte[pTagAndLength.getLength()];
        byte val[] = null;
        if (pTagAndLength.getTag() == EmvTags.TERMINAL_TRANSACTION_QUALIFIERS) {
            // Advertise this terminal's (fairly permissive) TTQ capabilities.
            TerminalTransactionQualifiers terminalQual = new TerminalTransactionQualifiers();
            terminalQual.setContactlessVSDCsupported(true);
            terminalQual.setContactEMVsupported(true);
            terminalQual.setMagneticStripeSupported(true);
            terminalQual.setContactlessEMVmodeSupported(true);
            terminalQual.setOnlinePINsupported(true);
            terminalQual.setReaderIsOfflineOnly(false);
            terminalQual.setSignatureSupported(true);
            terminalQual.setContactChipOfflinePINsupported(true);
            terminalQual.setIssuerUpdateProcessingSupported(true);
            terminalQual.setConsumerDeviceCVMsupported(true);
            val = terminalQual.getBytes();
        } else if (pTagAndLength.getTag() == EmvTags.TERMINAL_COUNTRY_CODE) {
            // Numeric country code, left-padded to fill the requested byte length.
            val = BytesUtils.fromString(StringUtils.leftPad(String.valueOf(countryCode.getNumeric()),
                    pTagAndLength.getLength() * 2, "0"));
        } else if (pTagAndLength.getTag() == EmvTags.TRANSACTION_CURRENCY_CODE) {
            // Currency for the terminal's country, falling back to EUR.
            val = BytesUtils.fromString(StringUtils.leftPad(
                    String.valueOf(CurrencyEnum.find(countryCode, CurrencyEnum.EUR).getISOCodeNumeric()),
                    pTagAndLength.getLength() * 2, "0"));
        } else if (pTagAndLength.getTag() == EmvTags.TRANSACTION_DATE) {
            // Current date as YYMMDD (SimpleDateFormat created per call, so no
            // thread-safety concern here).
            SimpleDateFormat sdf = new SimpleDateFormat("yyMMdd");
            val = BytesUtils.fromString(sdf.format(new Date()));
        } else if (pTagAndLength.getTag() == EmvTags.TRANSACTION_TYPE
                || pTagAndLength.getTag() == EmvTags.TERMINAL_TRANSACTION_TYPE) {
            val = new byte[] { (byte) TransactionTypeEnum.PURCHASE.getKey() };
        } else if (pTagAndLength.getTag() == EmvTags.AMOUNT_AUTHORISED_NUMERIC) {
            val = BytesUtils.fromString("01");
        } else if (pTagAndLength.getTag() == EmvTags.TERMINAL_TYPE) {
            val = new byte[] { 0x22 };
        } else if (pTagAndLength.getTag() == EmvTags.TERMINAL_CAPABILITIES) {
            val = new byte[] { (byte) 0xE0, (byte) 0xA0, 0x00 };
        } else if (pTagAndLength.getTag() == EmvTags.ADDITIONAL_TERMINAL_CAPABILITIES) {
            val = new byte[] { (byte) 0x8e, (byte) 0, (byte) 0xb0, 0x50, 0x05 };
        } else if (pTagAndLength.getTag() == EmvTags.DS_REQUESTED_OPERATOR_ID) {
            val = BytesUtils.fromString("7A45123EE59C7F40");
        } else if (pTagAndLength.getTag() == EmvTags.UNPREDICTABLE_NUMBER) {
            // Written straight into the result; 'val' stays null so no copy happens.
            random.nextBytes(ret);
        } else if (pTagAndLength.getTag() == EmvTags.MERCHANT_TYPE_INDICATOR) {
            val = new byte[] { 0x01 };
        } else if (pTagAndLength.getTag() == EmvTags.TERMINAL_TRANSACTION_INFORMATION) {
            val = new byte[] { (byte) 0xC0, (byte) 0x80, 0 };
        }
        if (val != null) {
            // Right-align val into ret, truncating from the left if it is too long.
            System.arraycopy(val, 0, ret, Math.max(ret.length - val.length, 0),
                    Math.min(val.length, ret.length));
        }
        // Unknown tags fall through to an all-zero value of the requested length.
        return ret;
    }
}
public class TrackerRequestProcessor { /** * Write a { @ link HTTPTrackerErrorMessage } to the response with the given * HTTP status code . * @ param status The HTTP status code to return . * @ param error The error reported by the tracker . */ private void serveError ( Status status , HTTPTrackerErrorMessage error , RequestHandler requestHandler ) throws IOException { } }
requestHandler . serveResponse ( status . getCode ( ) , status . getDescription ( ) , error . getData ( ) ) ;
public class AbstractXTree {

    /**
     * Writes all supernodes to the end of the file. This is only supposed to be
     * used for a final saving of an XTree. If another page is added to this tree,
     * the supernodes written to file by this operation are over-written.
     *
     * @return the number of bytes written to file for this tree's supernodes
     * @throws IOException if there are any io problems when writing the tree's
     *         supernodes
     */
    public long commit() throws IOException {
        final PageFile<N> file = super.getFile();
        // Supernodes can only be appended to an on-disk page file.
        if (!(file instanceof PersistentPageFile)) {
            throw new IllegalStateException("Trying to commit a non-persistent XTree");
        }
        long npid = file.getNextPageID();
        XTreeHeader ph = (XTreeHeader) ((PersistentPageFile<?>) file).getHeader();
        // Supernodes are appended after the reserved pages and all regular pages.
        long offset = (ph.getReservedPages() + npid) * ph.getPageSize();
        ph.setSupernode_offset(npid * ph.getPageSize());
        ph.setNumberOfElements(num_elements);
        RandomAccessFile ra_file = ((PersistentPageFile<?>) file).getFile();
        // Persist the updated header, then position at the supernode area.
        ph.writeHeader(ra_file);
        ra_file.seek(offset);
        long nBytes = 0;
        for (Iterator<N> iterator = supernodes.values().iterator(); iterator.hasNext();) {
            N supernode = iterator.next();
            // Serialize the supernode into memory first to know its size.
            // NOTE(review): the streams are closed explicitly rather than with
            // try-with-resources; a serialization failure would leak only
            // in-memory streams, so this is benign but worth confirming.
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ObjectOutputStream oos = new ObjectOutputStream(baos);
            supernode.writeSuperNode(oos);
            oos.close();
            baos.close();
            byte[] array = baos.toByteArray();
            // Each supernode occupies a whole number of pages, sized by its capacity.
            byte[] sn_array = new byte[getPageSize() * (int) Math.ceil((double) supernode.getCapacity() / dirCapacity)];
            if (array.length > sn_array.length) {
                throw new IllegalStateException("Supernode is too large for fitting in "
                        + ((int) Math.ceil((double) supernode.getCapacity() / dirCapacity))
                        + " pages of total size " + sn_array.length);
            }
            // Copy into the zero-padded page-aligned buffer and append it.
            System.arraycopy(array, 0, sn_array, 0, array.length);
            // file.countWrite();
            ra_file.write(sn_array);
            nBytes += sn_array.length;
        }
        return nBytes;
    }
}
public class LocalFileEntityResolver { /** * Resolve the entity . First try to find it locally , then fallback to the network . */ public InputSource resolveEntity ( String publicID , String systemID ) throws SAXException , IOException { } }
InputSource localFileInput = resolveLocalEntity ( systemID ) ; return localFileInput != null ? localFileInput : new InputSource ( systemID ) ;
public class WstxInputData { /** * Method that can be used to check whether specified character * is a valid first character of an XML 1.0/1.1 name ; except that * colon ( : ) is not recognized as a start char here : caller has * to verify it separately ( since it generally affects namespace * mapping of a qualified name ) . */ protected final boolean isNameStartChar ( char c ) { } }
/* First , let ' s handle 7 - bit ascii range ( identical between xml * 1.0 and 1.1) */ if ( c <= 0x7A ) { // ' z ' or earlier if ( c >= 0x61 ) { // ' a ' - ' z ' are ok return true ; } if ( c < 0x41 ) { // before ' A ' just white space return false ; } return ( c <= 0x5A ) || ( c == '_' ) ; // ' A ' - ' Z ' and ' _ ' are ok } /* Ok , otherwise need to use a big honking bit sets . . . which * differ between 1.0 and 1.1 */ return mXml11 ? XmlChars . is11NameStartChar ( c ) : XmlChars . is10NameStartChar ( c ) ;
public class TypeLibrary { /** * Translates an unqualified name such as ' String ' to ' java . lang . String ' , _ if _ you added ' java . lang . String ' to the library via the { @ code addType } method . * Also returns the input if it is equal to a fully qualified name added to this type library . * Returns null if it does not match any type in this type library . */ public String toQualified ( String typeReference ) { } }
if ( unqualifiedToQualifiedMap == null ) { if ( typeReference . equals ( unqualified ) || typeReference . equals ( qualified ) ) return qualified ; for ( Map . Entry < String , String > e : LombokInternalAliasing . ALIASES . entrySet ( ) ) { if ( e . getKey ( ) . equals ( typeReference ) ) return e . getValue ( ) ; } return null ; } return unqualifiedToQualifiedMap . get ( typeReference ) ;
public class EtcdClient { /** * Deletes the node with the given key from etcd . * @ param key * the node ' s key * @ return the response from etcd with the node * @ throws EtcdException * in case etcd returned an error */ public EtcdResponse delete ( final String key ) throws EtcdException { } }
UriComponentsBuilder builder = UriComponentsBuilder . fromUriString ( KEYSPACE ) ; builder . pathSegment ( key ) ; return execute ( builder , HttpMethod . DELETE , null , EtcdResponse . class ) ;
public class ClassAliasTypeResolverBuilder { /** * Registers mappings between classes and aliases ( ids ) . * @ param classToId */ public void setClassToId ( final Map < Class < ? > , String > classToId ) { } }
Map < String , Class < ? > > reverseMap = new HashMap < String , Class < ? > > ( ) ; for ( Map . Entry < Class < ? > , String > entry : classToId . entrySet ( ) ) { Assert . notNull ( entry . getKey ( ) , "Class cannot be null: " + entry ) ; Assert . hasText ( entry . getValue ( ) , "Alias (id) cannot be null or contain only whitespaces" + entry ) ; if ( reverseMap . put ( entry . getValue ( ) , entry . getKey ( ) ) != null ) { throw new IllegalArgumentException ( "Two or more classes with the same alias (id): " + entry . getValue ( ) ) ; } } this . classToId = classToId ; this . idToClass = reverseMap ;
public class ExpressionUtils {
    /**
     * Use {@link #createAndRegisterExpression(DynamicJasperDesign, String, CustomExpression)}.
     * This deprecated version may cause wrong field values when the expression is
     * executed in a group footer.
     *
     * @param name the expression's name
     * @param expression the custom expression to wrap
     * @return the JasperReports design expression for the given custom expression
     */
    @Deprecated
    public static JRDesignExpression createExpression(String name, CustomExpression expression) {
        // Delegate to the three-argument overload with the boolean flag off.
        // NOTE(review): flag semantics are defined by the overload, which is not
        // visible here — presumably it controls registration; confirm there.
        return createExpression(name, expression, false);
    }
}
public class HttpOutboundServiceContextImpl {
    /**
     * This gets the next body buffer asynchronously. If the body is encoded
     * or compressed, then the encoding is removed and the "next" buffer
     * returned. Null callbacks are not allowed and will trigger a
     * NullPointerException.
     * If the asynchronous request is fulfilled on the same thread, then this
     * connection's VirtualConnection will be returned and the callback will not
     * be used. A null return code means that an asynchronous read is in progress.
     * The boolean bForce parameter allows the caller to force the asynchronous
     * action even if it could be handled immediately. The return code will always
     * be null and the callback always used.
     * Once given a buffer, we keep no record of that buffer. It is the users
     * responsibility to release it.
     *
     * @param callback
     * @param bForce
     * @return VirtualConnection (null if an async read is in progress,
     *         non-null if data is ready)
     * @throws BodyCompleteException -- if the entire body has already been read
     */
    @Override
    public VirtualConnection getResponseBodyBuffer(InterChannelCallback callback, boolean bForce) throws BodyCompleteException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "getResponseBodyBuffer(async)");
        }
        try {
            // If the body is invalid or buffers are already queued, no read is
            // needed: complete immediately (via callback when forced).
            if (!checkBodyValidity() || incomingBuffersReady()) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                    Tr.exit(tc, "getResponseBodyBuffer(async): read not needed");
                }
                if (bForce) {
                    callback.complete(getVC());
                    return null;
                }
                return getVC();
            }
        } catch (IOException ioe) {
            // no FFDC required
            // Errors are reported through the callback, never thrown to the caller.
            callback.error(getVC(), ioe);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                Tr.exit(tc, "getResponseBodyBuffer(async): error " + ioe);
            }
            return null;
        }
        if (isBodyComplete()) {
            // Instead of throwing BodyCompleteException, return the VC as though
            // data is immediately ready; the caller will switch to their sync
            // block and then get a null buffer back.
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                Tr.exit(tc, "getResponseBodyBuffer(async): body complete");
            }
            if (bForce) {
                callback.complete(getVC());
                return null;
            }
            return getVC();
        }
        // Arm the async read: record the callback and the force/multi-read flags
        // before kicking off the read so a racing completion sees them.
        setAppReadCallback(callback);
        setForceAsync(bForce);
        setMultiRead(false);
        try {
            // readBodyBuffer returning false means the read finished inline.
            if (!readBodyBuffer(getResponseImpl(), true)) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                    Tr.exit(tc, "getResponseBodyBuffer(async): read finished");
                }
                if (bForce) {
                    callback.complete(getVC());
                    return null;
                }
                return getVC();
            }
        } catch (IOException ioe) {
            // no FFDC required
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                Tr.exit(tc, "getResponseBodyBuffer(async): exception: " + ioe);
            }
            callback.error(getVC(), ioe);
            return null;
        }
        // Read is in flight; the callback will fire when data arrives.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "getResponseBodyBuffer(async): null");
        }
        return null;
    }
}
public class XARecorderRecovery { /** * / * ( non - Javadoc ) * @ see org . csc . phynixx . loggersystem . logrecord . IXARecoderRecovery # close ( ) */ @ Override public synchronized void close ( ) { } }
Set < IXADataRecorder > recoveredXADataRecorders = this . getRecoveredXADataRecorders ( ) ; for ( IXADataRecorder dataRecorder : recoveredXADataRecorders ) { dataRecorder . disqualify ( ) ; }
public class SubItemUtil {
    /**
     * Select or unselect all sub items underneath an expandable item.
     *
     * @param adapter the adapter instance
     * @param header the header who's children should be selected or deselected
     * @param select the new selected state of the sub items
     */
    public static <T extends IItem & IExpandable> void selectAllSubItems(final FastAdapter adapter, T header, boolean select) {
        // Delegate to the full overload with defaults for the trailing
        // parameters. NOTE(review): their semantics (presumably a notify flag
        // and a payload/predicate) are defined by that overload — confirm there.
        selectAllSubItems(adapter, header, select, false, null);
    }
}
public class DebugTensorWatch {
    /**
     * <pre>
     * Name of the node to watch.
     * </pre>
     * <code>optional string node_name = 1;</code>
     *
     * Protobuf-generated accessor: the field is stored either as a decoded
     * String or as the wire-format ByteString; decoding is done lazily and
     * the result is cached back into the field.
     */
    public java.lang.String getNodeName() {
        java.lang.Object ref = nodeName_;
        if (ref instanceof java.lang.String) {
            // Already decoded on a previous call — return the cached String.
            return (java.lang.String) ref;
        } else {
            // First access: decode the ByteString as UTF-8 and cache it so
            // subsequent calls skip the conversion.
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            nodeName_ = s;
            return s;
        }
    }
}
public class AbstractWMultiSelectList {
    /**
     * Determines which selections have been added in the given request.
     * Request parameter values are matched against the option list (including
     * nested {@code OptionGroup}s) by their option codes; unmatched values are
     * kept verbatim when the list is editable, otherwise ignored.
     *
     * @param request the current request
     * @return a list of selections that have been added in the given request.
     */
    protected List<?> getNewSelections(final Request request) {
        String[] paramValues = request.getParameterValues(getId());
        if (paramValues == null || paramValues.length == 0) {
            // Nothing submitted for this component.
            return NO_SELECTION;
        }
        List<String> values = Arrays.asList(paramValues);
        List<Object> newSelections = new ArrayList<>(values.size());
        // Figure out which options have been selected.
        List<?> options = getOptions();
        if (options == null || options.isEmpty()) {
            if (!isEditable()) {
                // User could not have made a selection.
                return NO_SELECTION;
            }
            // Editable with no predefined options: match against an empty list
            // so every submitted value falls through to the "not found" branch.
            options = Collections.EMPTY_LIST;
        }
        for (Object value : values) {
            boolean found = false;
            // optionIndex is a running index across groups and plain options,
            // reset per submitted value, used to derive each option's code.
            int optionIndex = 0;
            for (Object option : options) {
                if (option instanceof OptionGroup) {
                    List<?> groupOptions = ((OptionGroup) option).getOptions();
                    if (groupOptions != null) {
                        for (Object nestedOption : groupOptions) {
                            if (value.equals(optionToCode(nestedOption, optionIndex++))) {
                                newSelections.add(nestedOption);
                                found = true;
                                // NOTE(review): this break only exits the group
                                // loop; the outer option loop keeps scanning.
                                // Harmless if codes are unique — confirm.
                                break;
                            }
                        }
                    }
                } else if (value.equals(optionToCode(option, optionIndex++))) {
                    newSelections.add(option);
                    found = true;
                    break;
                }
            }
            if (!found) {
                if (isEditable()) {
                    // Editable lists accept free-text values as-is.
                    newSelections.add(value);
                } else {
                    LOG.warn("Option \"" + value + "\" on the request is not a valid option. Will be ignored.");
                }
            }
        }
        // If no valid options found, then return the current settings
        if (newSelections.isEmpty()) {
            LOG.warn("No options on the request are valid. Will be ignored.");
            return getValue();
        }
        // If must have selection and more than 1 option selected, remove the "null" entry if it was selected.
        if (!isAllowNoSelection() && newSelections.size() > 1) {
            List<Object> filtered = new ArrayList<>();
            Object nullOption = null;
            for (Object option : newSelections) {
                // Check option is null or empty
                boolean isNull = option == null ? true : option.toString().length() == 0;
                if (isNull) {
                    // Hold the option as it could be "null" or "empty"
                    nullOption = option;
                } else {
                    filtered.add(option);
                }
            }
            // In the case where only null options were selected, then add one nullOption
            if (filtered.isEmpty()) {
                filtered.add(nullOption);
            }
            return filtered;
        } else {
            return newSelections;
        }
    }
}
public class Bidi {
    /**
     * Initializes the bracket-pairing data for a resolution pass.
     *
     * In the isoRuns array, the first entry is used for text outside of any
     * isolate sequence. Higher entries are used for each more deeply nested
     * isolate sequence. isoRunLast is the index of the last used entry. The
     * openings array is used to note the data of opening brackets not yet
     * matched by a closing bracket, or matched but still susceptible to change
     * level.
     * Each isoRun entry contains the index of the first and
     * one-after-last openings entries for pending opening brackets it
     * contains. The next openings entry to use is the one-after-last of the
     * most deeply nested isoRun entry.
     * isoRun entries also contain their current embedding level and the last
     * encountered strong character, since these will be needed to resolve
     * the level of paired brackets.
     */
    private void bracketInit(BracketData bd) {
        // Seed the outermost (non-isolate) run with an empty openings span
        // at the paragraph's base level.
        bd.isoRunLast = 0;
        bd.isoRuns[0] = new IsoRun();
        bd.isoRuns[0].start = 0;
        bd.isoRuns[0].limit = 0;
        bd.isoRuns[0].level = GetParaLevelAt(0);
        // lastStrong/lastBase/contextDir all start as the paragraph direction
        // (low bit of the paragraph level: 0 = LTR, 1 = RTL).
        bd.isoRuns[0].lastStrong = bd.isoRuns[0].lastBase = bd.isoRuns[0].contextDir = (byte) (GetParaLevelAt(0) & 1);
        bd.isoRuns[0].contextPos = 0;
        // Start with a small openings pool; grown elsewhere if needed.
        bd.openings = new Opening[SIMPLE_OPENINGS_COUNT];
        bd.isNumbersSpecial = reorderingMode == REORDER_NUMBERS_SPECIAL || reorderingMode == REORDER_INVERSE_FOR_NUMBERS_SPECIAL;
    }
}
public class AutoMlClient { /** * Imports data into a dataset . For Tables this method can only be called on an empty Dataset . * < p > For Tables : & # 42 ; A * [ schema _ inference _ version ] [ google . cloud . automl . v1beta1 . InputConfig . params ] parameter must be * explicitly set . Returns an empty response in the * [ response ] [ google . longrunning . Operation . response ] field when it completes . * < p > Sample code : * < pre > < code > * try ( AutoMlClient autoMlClient = AutoMlClient . create ( ) ) { * DatasetName name = DatasetName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ DATASET ] " ) ; * InputConfig inputConfig = InputConfig . newBuilder ( ) . build ( ) ; * autoMlClient . importDataAsync ( name . toString ( ) , inputConfig ) . get ( ) ; * < / code > < / pre > * @ param name Required . Dataset name . Dataset must already exist . All imported annotations and * examples will be added . * @ param inputConfig Required . The desired input location and its domain specific semantics , if * any . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Empty , OperationMetadata > importDataAsync ( String name , InputConfig inputConfig ) { } }
ImportDataRequest request = ImportDataRequest . newBuilder ( ) . setName ( name ) . setInputConfig ( inputConfig ) . build ( ) ; return importDataAsync ( request ) ;
public class ApiOvhTelephony { /** * Get this object properties * REST : GET / telephony / { billingAccount } / ovhPabx / { serviceName } / hunting / agent / { agentId } * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] * @ param agentId [ required ] */ public OvhOvhPabxHuntingAgent billingAccount_ovhPabx_serviceName_hunting_agent_agentId_GET ( String billingAccount , String serviceName , Long agentId ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/ovhPabx/{serviceName}/hunting/agent/{agentId}" ; StringBuilder sb = path ( qPath , billingAccount , serviceName , agentId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOvhPabxHuntingAgent . class ) ;
public class JavaParser {
    /**
     * ANTLR-generated rule method for:
     * src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:612:1:
     * annotation : '@' annotationName ( '(' ( elementValuePairs )? ')' )? ;
     *
     * Matches a Java annotation: '@', the annotation name, and an optional
     * parenthesized (possibly empty) element-value-pair list. Uses the
     * generated backtracking/memoization machinery; failures during
     * backtracking return silently rather than reporting errors.
     */
    public final void annotation() throws RecognitionException {
        int annotation_StartIndex = input.index();
        try {
            // Memoization: skip this rule if it already parsed at this position
            // during backtracking.
            if (state.backtracking > 0 && alreadyParsedRule(input, 65)) {
                return;
            }
            // Java.g:613:7: '@' annotationName ( '(' ( elementValuePairs )? ')' )?
            {
                // Token 58 is '@'.
                match(input, 58, FOLLOW_58_in_annotation2305);
                if (state.failed) return;
                pushFollow(FOLLOW_annotationName_in_annotation2307);
                annotationName();
                state._fsp--;
                if (state.failed) return;
                // Java.g:613:26: ( '(' ( elementValuePairs )? ')' )?
                int alt87 = 2;
                int LA87_0 = input.LA(1);
                // Token 36 is '(' — take the optional argument-list branch.
                if ((LA87_0 == 36)) {
                    alt87 = 1;
                }
                switch (alt87) {
                    case 1:
                        // Java.g:613:27: '(' ( elementValuePairs )? ')'
                        {
                            match(input, 36, FOLLOW_36_in_annotation2310);
                            if (state.failed) return;
                            // Java.g:613:31: ( elementValuePairs )?
                            int alt86 = 2;
                            // Predict elementValuePairs from the generated FIRST
                            // set (literals, identifiers, operators, keywords).
                            int LA86_0 = input.LA(1);
                            if (((LA86_0 >= CharacterLiteral && LA86_0 <= DecimalLiteral) || LA86_0 == FloatingPointLiteral || (LA86_0 >= HexLiteral && LA86_0 <= Identifier) || (LA86_0 >= OctalLiteral && LA86_0 <= StringLiteral) || LA86_0 == 29 || LA86_0 == 36 || (LA86_0 >= 40 && LA86_0 <= 41) || (LA86_0 >= 44 && LA86_0 <= 45) || LA86_0 == 53 || LA86_0 == 58 || LA86_0 == 65 || LA86_0 == 67 || (LA86_0 >= 70 && LA86_0 <= 71) || LA86_0 == 77 || (LA86_0 >= 79 && LA86_0 <= 80) || LA86_0 == 82 || LA86_0 == 85 || LA86_0 == 92 || LA86_0 == 94 || (LA86_0 >= 97 && LA86_0 <= 98) || LA86_0 == 105 || LA86_0 == 108 || LA86_0 == 111 || LA86_0 == 115 || LA86_0 == 118 || LA86_0 == 121 || LA86_0 == 126)) {
                                alt86 = 1;
                            }
                            switch (alt86) {
                                case 1:
                                    // Java.g:613:31: elementValuePairs
                                    {
                                        pushFollow(FOLLOW_elementValuePairs_in_annotation2312);
                                        elementValuePairs();
                                        state._fsp--;
                                        if (state.failed) return;
                                    }
                                    break;
                            }
                            // Token 37 is ')'.
                            match(input, 37, FOLLOW_37_in_annotation2315);
                            if (state.failed) return;
                        }
                        break;
                }
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
            if (state.backtracking > 0) {
                memoize(input, 65, annotation_StartIndex);
            }
        }
    }
}
public class InvocationDispatcher { /** * Wrap API to SimpleDtxnInitiator - mostly for the future */ public CreateTransactionResult createTransaction ( final long connectionId , final long txnId , final long uniqueId , final StoredProcedureInvocation invocation , final boolean isReadOnly , final boolean isSinglePartition , final boolean isEveryPartition , final int [ ] partitions , final int messageSize , long nowNanos , final boolean isForReplay ) { } }
assert ( ! isSinglePartition || ( partitions . length == 1 ) ) ; final ClientInterfaceHandleManager cihm = m_cihm . get ( connectionId ) ; if ( cihm == null ) { hostLog . rateLimitedLog ( 60 , Level . WARN , null , "InvocationDispatcher.createTransaction request rejected. " + "This is likely due to VoltDB ceasing client communication as it " + "shuts down." ) ; return CreateTransactionResult . NO_CLIENT_HANDLER ; } Long initiatorHSId = null ; boolean isShortCircuitRead = false ; /* * Send the read to the partition leader only * @ MigratePartitionLeader always goes to partition leader */ if ( isSinglePartition && ! isEveryPartition ) { initiatorHSId = m_cartographer . getHSIdForSinglePartitionMaster ( partitions [ 0 ] ) ; } else { // Multi - part transactions go to the multi - part coordinator initiatorHSId = m_cartographer . getHSIdForMultiPartitionInitiator ( ) ; // Treat all MP reads as short - circuit since they can run out - of - order // from their arrival order due to the MP Read - only execution pool if ( isReadOnly ) { isShortCircuitRead = true ; } } if ( initiatorHSId == null ) { hostLog . rateLimitedLog ( 60 , Level . INFO , null , "InvocationDispatcher.createTransaction request rejected. " + "This is likely due to parition leader being removed during elastic shrink." ) ; return CreateTransactionResult . PARTITION_REMOVED ; } long handle = cihm . getHandle ( isSinglePartition , isSinglePartition ? partitions [ 0 ] : - 1 , invocation . getClientHandle ( ) , messageSize , nowNanos , invocation . getProcName ( ) , initiatorHSId , isShortCircuitRead ) ; Iv2InitiateTaskMessage workRequest = new Iv2InitiateTaskMessage ( m_siteId , initiatorHSId , Iv2InitiateTaskMessage . UNUSED_TRUNC_HANDLE , txnId , uniqueId , isReadOnly , isSinglePartition , ( partitions == null ) || ( partitions . length < 2 ) ? null : partitions , invocation , handle , connectionId , isForReplay ) ; Long finalInitiatorHSId = initiatorHSId ; final VoltTrace . 
TraceEventBatch traceLog = VoltTrace . log ( VoltTrace . Category . CI ) ; if ( traceLog != null ) { traceLog . add ( ( ) -> VoltTrace . instantAsync ( "inittxn" , invocation . getClientHandle ( ) , "clientHandle" , Long . toString ( invocation . getClientHandle ( ) ) , "ciHandle" , Long . toString ( handle ) , "partitions" , partitions . toString ( ) , "dest" , CoreUtils . hsIdToString ( finalInitiatorHSId ) ) ) ; } Iv2Trace . logCreateTransaction ( workRequest ) ; m_mailbox . send ( initiatorHSId , workRequest ) ; return CreateTransactionResult . SUCCESS ;
public class MatrixIO {
    /**
     * Writes the matrix to the specified output file in the provided format.
     *
     * @param matrix the matrix to be written
     * @param output the file in which the matrix should be written
     * @param format the data format in which the matrix's data should be written
     * @throws IllegalArgumentException if the input matrix is 0-dimensional
     * @throws IOException if an error occurs while writing to the output file
     */
    public static void writeMatrix(Matrix matrix, File output, Format format) throws IOException {
        if (matrix.rows() == 0 || matrix.columns() == 0)
            throw new IllegalArgumentException("cannot write 0-dimensional matrix");
        switch (format) {
        // One whitespace-separated row of values per line.
        case DENSE_TEXT: {
            PrintWriter pw = new PrintWriter(output);
            for (int i = 0; i < matrix.rows(); ++i) {
                StringBuffer sb = new StringBuffer(matrix.columns() * 5);
                for (int j = 0; j < matrix.columns(); ++j) {
                    sb.append(matrix.get(i, j)).append(" ");
                }
                pw.println(sb.toString());
            }
            pw.close();
            break;
        }
        // These two formats are equivalent: a "rows cols" header line followed
        // by one row of float-truncated values per line.
        case CLUTO_DENSE:
        case SVDLIBC_DENSE_TEXT: {
            PrintWriter pw = new PrintWriter(output);
            pw.println(matrix.rows() + " " + matrix.columns());
            for (int i = 0; i < matrix.rows(); ++i) {
                StringBuffer sb = new StringBuffer(32);
                for (int j = 0; j < matrix.columns(); ++j) {
                    sb.append((float) (matrix.get(i, j))).append(" ");
                }
                pw.println(sb.toString());
            }
            pw.close();
            break;
        }
        // Binary: int rows, int cols, then row-major float values.
        case SVDLIBC_DENSE_BINARY: {
            DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(output)));
            outStream.writeInt(matrix.rows());
            outStream.writeInt(matrix.columns());
            for (int i = 0; i < matrix.rows(); ++i) {
                for (int j = 0; j < matrix.columns(); ++j) {
                    outStream.writeFloat((float) matrix.get(i, j));
                }
            }
            outStream.close();
            break;
        }
        case CLUTO_SPARSE: {
            PrintWriter pw = new PrintWriter(output);
            // Count the number of non-zero values in the matrix; sparse row
            // vectors expose their indices directly, dense ones are scanned.
            int nonZero = 0;
            int rows = matrix.rows();
            for (int i = 0; i < rows; ++i) {
                DoubleVector v = matrix.getRowVector(i);
                if (v instanceof SparseVector)
                    nonZero += ((SparseVector) v).getNonZeroIndices().length;
                else {
                    for (int col = 0; col < v.length(); ++col) {
                        if (v.get(col) != 0)
                            nonZero++;
                    }
                }
            }
            // Write the header: rows cols non-zero
            pw.println(matrix.rows() + " " + matrix.columns() + " " + nonZero);
            for (int row = 0; row < rows; ++row) {
                StringBuilder sb = new StringBuilder(nonZero / rows);
                // NOTE: the columns in CLUTO start at 1, not 0, so increment
                // one to each of the columns
                DoubleVector v = matrix.getRowVector(row);
                if (v instanceof SparseVector) {
                    int[] nzIndices = ((SparseVector) v).getNonZeroIndices();
                    for (int nz : nzIndices) {
                        sb.append(nz + 1).append(" ").append(v.get(nz)).append(" ");
                    }
                } else {
                    for (int col = 0; col < v.length(); ++col) {
                        double d = v.get(col);
                        if (d != 0)
                            sb.append(col + 1).append(" ").append(d).append(" ");
                    }
                }
                pw.println(sb.toString());
            }
            pw.close();
            break;
        }
        case SVDLIBC_SPARSE_TEXT: {
            PrintWriter pw = new PrintWriter(output);
            // count the number of non-zero values for each column as well as
            // the total
            int nonZero = 0;
            int[] nonZeroPerCol = new int[matrix.columns()];
            for (int i = 0; i < matrix.rows(); ++i) {
                for (int j = 0; j < matrix.columns(); ++j) {
                    if (matrix.get(i, j) != 0) {
                        nonZero++;
                        nonZeroPerCol[j]++;
                    }
                }
            }
            // loop through the matrix a second time, printing out the number of
            // non-zero values for each column, followed by those values and
            // their associated row
            pw.println(matrix.rows() + " " + matrix.columns() + " " + nonZero);
            for (int col = 0; col < matrix.columns(); ++col) {
                pw.println(nonZeroPerCol[col]);
                if (nonZeroPerCol[col] > 0) {
                    for (int row = 0; row < matrix.rows(); ++row) {
                        double val = matrix.get(row, col);
                        if (val != 0d) {
                            // NOTE: need to convert to float since this is what
                            // SVDLIBC uses
                            pw.println(row + " " + Double.valueOf(val).floatValue());
                        }
                    }
                }
            }
            pw.close();
            break;
        }
        case SVDLIBC_SPARSE_BINARY: {
            DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(output)));
            // count the number of non-zero values for each column as well as
            // the total
            int nonZero = 0;
            int[] nonZeroPerCol = new int[matrix.columns()];
            for (int i = 0; i < matrix.rows(); ++i) {
                for (int j = 0; j < matrix.columns(); ++j) {
                    if (matrix.get(i, j) != 0) {
                        nonZero++;
                        nonZeroPerCol[j]++;
                    }
                }
            }
            // Write the 12 byte header data
            outStream.writeInt(matrix.rows());
            outStream.writeInt(matrix.columns());
            outStream.writeInt(nonZero);
            // loop through the matrix a second time, printing out the number of
            // non-zero values for each column, followed by those values and
            // their associated row
            for (int col = 0; col < matrix.columns(); ++col) {
                outStream.writeInt(nonZeroPerCol[col]);
                if (nonZeroPerCol[col] > 0) {
                    for (int row = 0; row < matrix.rows(); ++row) {
                        double val = matrix.get(row, col);
                        if (val != 0) {
                            // NOTE: need to convert to float since this is what
                            // SVDLIBC uses
                            outStream.writeInt(row);
                            outStream.writeFloat((float) val);
                        }
                    }
                }
            }
            outStream.close();
            break;
        }
        case MATLAB_SPARSE: {
            PrintWriter pw = new PrintWriter(output);
            // NOTE: Matlab's sparse matrix offers no way of specifying the
            // original matrix's dimensions. This is only problematic if the
            // matrix contains trailing rows or columns that are all
            // zeros. Therefore to ensure that the matrix has the correct size,
            // we track the maximum values written and write a 0-value to extend
            // the matrix to its correct size
            int maxRowSeen = 0;
            int maxColSeen = 0;
            for (int i = 0; i < matrix.rows(); ++i) {
                for (int j = 0; j < matrix.columns(); ++j) {
                    if (matrix.get(i, j) == 0)
                        continue;
                    if (j > maxColSeen)
                        maxColSeen = j;
                    if (i > maxRowSeen)
                        maxRowSeen = i;
                    StringBuffer sb = new StringBuffer(32);
                    // Add 1 to index values since Matlab arrays are 1-based,
                    // not 0-based
                    sb.append(i + 1).append(" ").append(j + 1);
                    sb.append(" ").append(matrix.get(i, j));
                    pw.println(sb.toString());
                }
            }
            // Check whether we need to extend the matrix
            if (maxRowSeen + 1 != matrix.rows() || maxColSeen + 1 != matrix.columns()) {
                pw.println(matrix.rows() + " " + matrix.columns() + " 0");
            }
            pw.close();
            break;
        }
        default:
            throw new UnsupportedOperationException("writing to " + format + " is currently unsupported");
        }
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link Budget}{@code >}}.
     * JAXB-generated element factory: wraps the value in a JAXBElement tagged
     * with the Budget qualified name (no declaring scope class).
     */
    @XmlElementDecl(namespace = "http://schema.intuit.com/finance/v3", name = "Budget", substitutionHeadNamespace = "http://schema.intuit.com/finance/v3", substitutionHeadName = "IntuitObject")
    public JAXBElement<Budget> createBudget(Budget value) {
        return new JAXBElement<Budget>(_Budget_QNAME, Budget.class, null, value);
    }
}
public class RegisteredServiceAccessStrategyUtils {
    /**
     * Ensure service sso access is allowed.
     *
     * @param registeredService the registered service
     * @param service the service
     * @param ticketGrantingTicket the ticket granting ticket
     */
    public static void ensureServiceSsoAccessIsAllowed(final RegisteredService registeredService, final Service service, final TicketGrantingTicket ticketGrantingTicket) {
        // Delegate to the four-argument overload; the final boolean defaults to
        // false. NOTE(review): its meaning (presumably whether credentials were
        // provided on this request) is defined by the overload — confirm there.
        ensureServiceSsoAccessIsAllowed(registeredService, service, ticketGrantingTicket, false);
    }
}
public class CmsAccessControlList {
    /**
     * Calculates the permissions of the given user and his groups from the
     * access control list.<p>
     * The permissions are returned as permission string in the format
     * {{+|-}{r|w|v|c|i}}.
     *
     * @param user the user
     * @param groups the groups of this user
     * @param roles the roles of this user
     * @return a string that displays the permissions
     */
    public String getPermissionString(CmsUser user, List<CmsGroup> groups, List<CmsRole> roles) {
        // Resolve the aggregated permission set first, then render it.
        return getPermissions(user, groups, roles).getPermissionString();
    }
}
public class JobInProgressTraits { /** * Return a vector of completed TaskInProgress objects */ public Vector < TaskInProgress > reportTasksInProgress ( boolean shouldBeMap , boolean shouldBeComplete ) { } }
Vector < TaskInProgress > results = new Vector < TaskInProgress > ( ) ; TaskInProgress tips [ ] = null ; if ( shouldBeMap ) { tips = maps ; } else { tips = reduces ; } for ( int i = 0 ; i < tips . length ; i ++ ) { if ( tips [ i ] . isComplete ( ) == shouldBeComplete ) { results . add ( tips [ i ] ) ; } } return results ;
public class ImageResolutionImpl {
    /**
     * EMF-generated setter for the xResol feature: stores the new value and,
     * when adapters are attached, fires a SET notification carrying the old
     * and new values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setXResol(Integer newXResol) {
        Integer oldXResol = xResol;
        xResol = newXResol;
        // Only build the notification object if someone is listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.IMAGE_RESOLUTION__XRESOL, oldXResol, xResol));
    }
}
public class Broadcaster { /** * A . ts file was written in the recording directory . * Use this opportunity to verify the segment is of expected size * given the target bitrate * Called on a background thread */ @ Subscribe public void onSegmentWritten ( HlsSegmentWrittenEvent event ) { } }
try { File hlsSegment = event . getSegment ( ) ; queueOrSubmitUpload ( keyForFilename ( hlsSegment . getName ( ) ) , hlsSegment ) ; if ( isKitKat ( ) && mConfig . isAdaptiveBitrate ( ) && isRecording ( ) ) { // Adjust bitrate to match expected filesize long actualSegmentSizeBytes = hlsSegment . length ( ) ; long expectedSizeBytes = ( ( mConfig . getAudioBitrate ( ) / 8 ) + ( mVideoBitrate / 8 ) ) * mConfig . getHlsSegmentDuration ( ) ; float filesizeRatio = actualSegmentSizeBytes / ( float ) expectedSizeBytes ; if ( VERBOSE ) Log . i ( TAG , "OnSegmentWritten. Segment size: " + ( actualSegmentSizeBytes / 1000 ) + "kB. ratio: " + filesizeRatio ) ; if ( filesizeRatio < .7 ) { if ( mLastRealizedBandwidthBytesPerSec != 0 ) { // Scale bitrate while not exceeding available bandwidth float scaledBitrate = mVideoBitrate * ( 1 / filesizeRatio ) ; float bandwidthBitrate = mLastRealizedBandwidthBytesPerSec * 8 ; mVideoBitrate = ( int ) Math . min ( scaledBitrate , bandwidthBitrate ) ; } else { // Scale bitrate to match expected fileSize mVideoBitrate *= ( 1 / filesizeRatio ) ; } if ( VERBOSE ) Log . i ( TAG , "Scaling video bitrate to " + mVideoBitrate + " bps" ) ; adjustVideoBitrate ( mVideoBitrate ) ; } } } catch ( Exception ex ) { ex . printStackTrace ( ) ; }
public class ExposeLinearLayoutManagerEx {
    /**
     * If there is a pending scroll position or saved states, updates the anchor
     * info from that data and returns true.
     *
     * @param state the current layout state
     * @param anchorInfo the anchor to populate (position, coordinate,
     *        layout-from-end flag)
     * @return true when the anchor was derived from pending data, false when
     *         there is nothing pending or the pending position is invalid
     */
    private boolean updateAnchorFromPendingDataExpose(RecyclerView.State state, AnchorInfo anchorInfo) {
        // Pending data never applies during pre-layout or with no pending position.
        if (state.isPreLayout() || mCurrentPendingScrollPosition == RecyclerView.NO_POSITION) {
            return false;
        }
        // validate scroll position
        if (mCurrentPendingScrollPosition < 0 || mCurrentPendingScrollPosition >= state.getItemCount()) {
            mCurrentPendingScrollPosition = RecyclerView.NO_POSITION;
            mPendingScrollPositionOffset = INVALID_OFFSET;
            if (DEBUG) {
                Log.e(TAG, "ignoring invalid scroll position " + mCurrentPendingScrollPosition);
            }
            return false;
        }
        // if child is visible, try to make it a reference child and ensure it is
        // fully visible. if child is not visible, align it depending on its
        // virtual position.
        anchorInfo.mPosition = mCurrentPendingScrollPosition;
        if (mCurrentPendingSavedState != null && mCurrentPendingSavedState.getInt("AnchorPosition") >= 0) {
            // Anchor offset depends on how that child was laid out. Here, we
            // update it according to our current view bounds
            anchorInfo.mLayoutFromEnd = mCurrentPendingSavedState.getBoolean("AnchorLayoutFromEnd");
            if (anchorInfo.mLayoutFromEnd) {
                anchorInfo.mCoordinate = mOrientationHelper.getEndAfterPadding() - mCurrentPendingSavedState.getInt("AnchorOffset");
            } else {
                anchorInfo.mCoordinate = mOrientationHelper.getStartAfterPadding() + mCurrentPendingSavedState.getInt("AnchorOffset");
            }
            return true;
        }
        if (mPendingScrollPositionOffset == INVALID_OFFSET) {
            // No explicit offset requested: derive the coordinate from the
            // target child's current placement, if it is attached.
            View child = findViewByPosition(mCurrentPendingScrollPosition);
            if (child != null) {
                final int childSize = mOrientationHelper.getDecoratedMeasurement(child);
                if (childSize > mOrientationHelper.getTotalSpace()) {
                    // item does not fit. fix depending on layout direction
                    anchorInfo.assignCoordinateFromPadding();
                    return true;
                }
                final int startGap = mOrientationHelper.getDecoratedStart(child) - mOrientationHelper.getStartAfterPadding();
                if (startGap < 0) {
                    // Child starts before the padded area: snap it to the start.
                    anchorInfo.mCoordinate = mOrientationHelper.getStartAfterPadding();
                    anchorInfo.mLayoutFromEnd = false;
                    return true;
                }
                final int endGap = mOrientationHelper.getEndAfterPadding() - mOrientationHelper.getDecoratedEnd(child);
                if (endGap < 0) {
                    // Child overflows the padded end: snap it to the end.
                    anchorInfo.mCoordinate = mOrientationHelper.getEndAfterPadding();
                    anchorInfo.mLayoutFromEnd = true;
                    return true;
                }
                // Fully visible: anchor on its current edge per layout direction.
                anchorInfo.mCoordinate = anchorInfo.mLayoutFromEnd ? (mOrientationHelper.getDecoratedEnd(child) + mOrientationHelper.getTotalSpaceChange()) : mOrientationHelper.getDecoratedStart(child);
            } else {
                // item is not visible.
                if (getChildCount() > 0) {
                    // get position of any child, does not matter
                    int pos = getPosition(getChildAt(0));
                    anchorInfo.mLayoutFromEnd = mCurrentPendingScrollPosition < pos == mShouldReverseLayoutExpose;
                }
                anchorInfo.assignCoordinateFromPadding();
            }
            return true;
        }
        // override layout from end values for consistency
        anchorInfo.mLayoutFromEnd = mShouldReverseLayoutExpose;
        if (mShouldReverseLayoutExpose) {
            anchorInfo.mCoordinate = mOrientationHelper.getEndAfterPadding() - mPendingScrollPositionOffset;
        } else {
            anchorInfo.mCoordinate = mOrientationHelper.getStartAfterPadding() + mPendingScrollPositionOffset;
        }
        return true;
    }
}
public class SqlLoaderImpl { /** * { @ inheritDoc } * @ see jp . co . future . uroborosql . store . SqlLoader # setLoadPath ( java . lang . String ) */ @ Override public void setLoadPath ( final String loadPath ) { } }
if ( loadPath == null ) { LOG . warn ( "Use the default value because SQL template path is set to NULL." ) ; LOG . warn ( "Default load path[{}]" , DEFAULT_LOAD_PATH ) ; this . loadPath = DEFAULT_LOAD_PATH ; } else { this . loadPath = loadPath ; }
public class StorageWriter { /** * Acknowledges operations that were flushed to storage */ private CompletableFuture < Void > acknowledge ( Void ignored ) { } }
// Acknowledges ( truncates ) everything up to the highest sequence number committed by the
// processors , snapped to the closest valid truncation point . Returns a future that completes
// once the truncation is acknowledged and local state updated , or an already - completed
// future when nothing new can be truncated .
checkRunning ( ) ;
long traceId = LoggerHelpers . traceEnterWithContext ( log , this . traceObjectId , "acknowledge" ) ;
// Per the ackCalculator , the highest sequence number all processors have committed ;
// the data source may only truncate at specific points , so snap to the closest valid one .
long highestCommittedSeqNo = this . ackCalculator . getHighestCommittedSequenceNumber ( this . processors . values ( ) ) ;
long ackSequenceNumber = this . dataSource . getClosestValidTruncationPoint ( highestCommittedSeqNo ) ;
// Only truncate when this moves the truncation point forward .
if ( ackSequenceNumber > this . state . getLastTruncatedSequenceNumber ( ) ) {
// Issue the truncation and update the state ( when done ) .
return this . dataSource . acknowledge ( ackSequenceNumber , this . config . getAckTimeout ( ) ) . thenRun ( ( ) -> { this . state . setLastTruncatedSequenceNumber ( ackSequenceNumber ) ; logStageEvent ( "Acknowledged" , "SeqNo=" + ackSequenceNumber ) ; LoggerHelpers . traceLeave ( log , this . traceObjectId , "acknowledge" , traceId , ackSequenceNumber ) ; } ) ;
} else {
// Nothing to do .
LoggerHelpers . traceLeave ( log , this . traceObjectId , "acknowledge" , traceId , Operation . NO_SEQUENCE_NUMBER ) ;
return CompletableFuture . completedFuture ( null ) ;
}
public class HibernateRepositoryDao { /** * < p > getAllTypes . < / p > * @ return a { @ link java . util . List } object . */ @ SuppressWarnings ( "unchecked" ) public List < RepositoryType > getAllTypes ( ) { } }
final Criteria crit = sessionService . getSession ( ) . createCriteria ( RepositoryType . class ) ; List < RepositoryType > list = crit . list ( ) ; HibernateLazyInitializer . initCollection ( list ) ; return list ;
public class SarlFormalParameterImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
// EMF - generated reflective check : reports whether the given structural feature currently
// holds a non - default value . NOTE(review): marked @generated — hand edits are lost on
// regeneration , so only comments are added here .
switch ( featureID ) {
case SarlPackage . SARL_FORMAL_PARAMETER__DEFAULT_VALUE :
// The default - value feature counts as set whenever a default value expression exists .
return defaultValue != null ;
}
// All other features are handled by the generated superclass .
return super . eIsSet ( featureID ) ;
public class MatchConditionOnElements { /** * Removes the pseudo class from the given element . * @ param e the DOM element * @ param pseudoClass the pseudo class to be removed */ public void removeMatch ( Element e , PseudoClassType pseudoClass ) { } }
if ( elements != null ) { Set < PseudoClassType > classes = elements . get ( e ) ; if ( classes != null ) classes . remove ( pseudoClass ) ; }
public class StorageImportDispatcher { /** * Aggregates the api , client , and plan policies into a single ordered list . * @ param contractBean * @ param clientInfo */ private List < Policy > aggregateContractPolicies ( ContractBean contractBean , EntityInfo clientInfo ) throws StorageException { } }
List < Policy > policies = new ArrayList < > ( ) ; PolicyType [ ] types = new PolicyType [ ] { PolicyType . Client , PolicyType . Plan , PolicyType . Api } ; for ( PolicyType policyType : types ) { String org , id , ver ; switch ( policyType ) { case Client : { org = clientInfo . organizationId ; id = clientInfo . id ; ver = clientInfo . version ; break ; } case Plan : { org = contractBean . getApi ( ) . getApi ( ) . getOrganization ( ) . getId ( ) ; id = contractBean . getPlan ( ) . getPlan ( ) . getId ( ) ; ver = contractBean . getPlan ( ) . getVersion ( ) ; break ; } case Api : { org = contractBean . getApi ( ) . getApi ( ) . getOrganization ( ) . getId ( ) ; id = contractBean . getApi ( ) . getApi ( ) . getId ( ) ; ver = contractBean . getApi ( ) . getVersion ( ) ; break ; } default : { throw new RuntimeException ( "Missing case for switch!" ) ; // $ NON - NLS - 1 $ } } Iterator < PolicyBean > clientPolicies = storage . getAllPolicies ( org , id , ver , policyType ) ; while ( clientPolicies . hasNext ( ) ) { PolicyBean policyBean = clientPolicies . next ( ) ; Policy policy = new Policy ( ) ; policy . setPolicyJsonConfig ( policyBean . getConfiguration ( ) ) ; policy . setPolicyImpl ( policyBean . getDefinition ( ) . getPolicyImpl ( ) ) ; policies . add ( policy ) ; } } return policies ;
public class DataIO { /** * Get a slice of rows out of the table matching the time window * @ param table * @ param timeCol * @ param start * @ param end * @ return the first and last row that matches the time window start - > end */ public static int [ ] sliceByTime ( CSTable table , int timeCol , Date start , Date end ) { } }
if ( end . before ( start ) ) { throw new IllegalArgumentException ( "end<start" ) ; } if ( timeCol < 0 ) { throw new IllegalArgumentException ( "timeCol :" + timeCol ) ; } int s = - 1 ; int e = - 1 ; int i = - 1 ; for ( String [ ] col : table . rows ( ) ) { i ++ ; Date d = Conversions . convert ( col [ timeCol ] , Date . class ) ; if ( s == - 1 && ( start . before ( d ) || start . equals ( d ) ) ) { s = i ; } if ( e == - 1 && ( end . before ( d ) || end . equals ( d ) ) ) { e = i ; break ; } } return new int [ ] { s , e } ;
public class Matchers { /** * Match in range < tt > start < / tt > , < tt > end < / tt > inclusive * @ see CharSequenceSliceMatcher # sliceThat ( int , int , Matcher ) * @ param start the slice start , inclusive , can be negative * @ param end the slice end , inclusive , can be negative * @ param matcher the nested matcher to use with the slice * @ return the matcher */ public static Matcher < CharSequence > sliceThat ( int start , int end , Matcher < CharSequence > matcher ) { } }
return CharSequenceSliceMatcher . sliceThat ( start , end , matcher ) ;
public class CollectionUtil { /** * public static String [ ] toStringArray ( Key [ ] keys ) { if ( keys = = null ) return null ; String [ ] arr = new * String [ keys . length ] ; for ( int i = 0 ; i < keys . length ; i + + ) { arr [ i ] = keys [ i ] . getString ( ) ; } return arr ; } */ public static String getKeyList ( Iterator < Key > it , String delimiter ) { } }
StringBuilder sb = new StringBuilder ( it . next ( ) . getString ( ) ) ; if ( delimiter . length ( ) == 1 ) { char c = delimiter . charAt ( 0 ) ; while ( it . hasNext ( ) ) { sb . append ( c ) ; sb . append ( it . next ( ) . getString ( ) ) ; } } else { while ( it . hasNext ( ) ) { sb . append ( delimiter ) ; sb . append ( it . next ( ) . getString ( ) ) ; } } return sb . toString ( ) ;
public class MultiTable { /** * Read the record that matches this record ' s current key . * @ exception DBException File exception . */ public boolean seek ( String strSeekSign ) throws DBException { } }
// Seeks the current underlying table for a record matching this record ' s current key ,
// then re - syncs this multi - table wrapper to the base table . Returns true on a hit .
boolean bSuccess = false ;
BaseTable table = this . getCurrentTable ( ) ;
if ( table != null ) {
// Mirror this record ' s default key order onto the underlying table before seeking .
table . getRecord ( ) . setKeyArea ( this . getRecord ( ) . getDefaultOrder ( ) ) ;
bSuccess = table . seek ( strSeekSign ) ;
if ( bSuccess ) // Move to first record
this . setCurrentTable ( table ) ;
}
// Always re - sync , whether or not the seek succeeded .
this . syncCurrentToBase ( ) ;
return bSuccess ;
public class SovereigntyApi { /** * List sovereignty structures Shows sovereignty data for structures . - - - * This route is cached for up to 120 seconds * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ return ApiResponse & lt ; List & lt ; SovereigntyStructuresResponse & gt ; & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public ApiResponse < List < SovereigntyStructuresResponse > > getSovereigntyStructuresWithHttpInfo ( String datasource , String ifNoneMatch ) throws ApiException { } }
com . squareup . okhttp . Call call = getSovereigntyStructuresValidateBeforeCall ( datasource , ifNoneMatch , null ) ; Type localVarReturnType = new TypeToken < List < SovereigntyStructuresResponse > > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;