signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ListSubscriptionsByTopicResult { /** * A list of subscriptions . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSubscriptions ( java . util . Collection ) } or { @ link # withSubscriptions ( java . util . Collection ) } if you want * to override the existing values . * @ param subscriptions * A list of subscriptions . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListSubscriptionsByTopicResult withSubscriptions ( Subscription ... subscriptions ) { } }
if ( this . subscriptions == null ) { setSubscriptions ( new com . amazonaws . internal . SdkInternalList < Subscription > ( subscriptions . length ) ) ; } for ( Subscription ele : subscriptions ) { this . subscriptions . add ( ele ) ; } return this ;
public class DialogUtils { /** * Waits for a { @ link android . app . Dialog } to close . * @ param timeout the amount of time in milliseconds to wait * @ return { @ code true } if the { @ code Dialog } is closed before the timeout and { @ code false } if it is not closed */ public boolean waitForDialogToClose ( long timeout ) { } }
waitForDialogToOpen ( TIMEOUT_DIALOG_TO_CLOSE , false ) ; final long endTime = SystemClock . uptimeMillis ( ) + timeout ; while ( SystemClock . uptimeMillis ( ) < endTime ) { if ( ! isDialogOpen ( ) ) { return true ; } sleeper . sleep ( MINISLEEP ) ; } return false ;
public class DecoratorProxyFactory { /** * m is more generic than a */ private static boolean isEqual ( Method m , Method a ) { } }
if ( m . getName ( ) . equals ( a . getName ( ) ) && m . getParameterTypes ( ) . length == a . getParameterTypes ( ) . length && m . getReturnType ( ) . isAssignableFrom ( a . getReturnType ( ) ) ) { for ( int i = 0 ; i < m . getParameterTypes ( ) . length ; i ++ ) { if ( ! ( m . getParameterTypes ( ) [ i ] . isAssignableFrom ( a . getParameterTypes ( ) [ i ] ) ) ) { return false ; } } return true ; } return false ;
public class HttpOutboundServiceContextImpl { /** * Method to encapsulate the act of reading , parsing , and deciding whether * to keep a response message ( asynchronously ) . This will return if a read * is being performed asynchronously , or if a response was fully parsed and * handed off to the application channel . The caller of this method should * perform no more work after this method . */ public void readAsyncResponse ( ) { } }
// NOTE(review): the line below is the implementation of readAsyncResponse().
// It either (a) sets up header buffers and starts an async socket read — in
// which case the registered HttpOSCReadCallback continues the work and vc stays
// null here unless the read completed immediately — or (b) reuses data already
// buffered and parses inline. The null/non-null vc protocol between read() and
// the callback is order-sensitive, so the code is left byte-identical.
// if read data is available or sitting on socket , then start the parse // now otherwise setup for a socket read VirtualConnection vc = null ; if ( ! isReadDataAvailable ( ) && null == getNextReadBuffer ( ) ) { setupReadBuffers ( getHttpConfig ( ) . getIncomingHdrBufferSize ( ) , false ) ; vc = getTSC ( ) . getReadInterface ( ) . read ( 1 , HttpOSCReadCallback . getRef ( ) , true , getReadTimeout ( ) ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "readAsyncResponse found existing data" ) ; } vc = getVC ( ) ; } // if we had data or the read completed already , try to parse it if ( null != vc ) { if ( null != parseResponseMessageAsync ( ) ) { handleParsedMessage ( ) ; } }
public class DocLocale { /** * Find out if there is any HTML tag in the given string . If found * return true else return false . */ private boolean htmlSentenceTerminatorFound ( String str , int index ) { } }
for ( int i = 0 ; i < sentenceTerminators . length ; i ++ ) { String terminator = sentenceTerminators [ i ] ; if ( str . regionMatches ( true , index , terminator , 0 , terminator . length ( ) ) ) { return true ; } } return false ;
public class Call { /** * Abbreviation for { { @ link # methodForBoolean ( String , Object . . . ) } . * @ since 1.1 * @ param methodName the name of the method * @ param optionalParameters the ( optional ) parameters of the method . * @ return the result of the method execution */ public static Function < Object , Boolean > bool ( final String methodName , final Object ... optionalParameters ) { } }
return methodForBoolean ( methodName , optionalParameters ) ;
public class SessionDescriptor { /** * Excludes specified formats from descriptor . * @ param formatName the name of the format . */ public void exclude ( String formatName ) { } }
for ( int i = 0 ; i < count ; i ++ ) { md [ i ] . exclude ( formatName ) ; }
public class CircularBuffer { /** * Add an item to the queue . * @ param item to be added */ public void enqueue ( T item ) { } }
queue [ head ] = item ; head ++ ; size ++ ; if ( head == queue . length ) head = 0 ;
public class CmsSolrIndex { /** * Checks if the current user is allowed to access non - online indexes . < p > * To access non - online indexes the current user must be a workplace user at least . < p > * @ param cms the CMS object initialized with the current request context / user * @ throws CmsSearchException thrown if the access is not permitted */ private void checkOfflineAccess ( CmsObject cms ) throws CmsSearchException { } }
// If an offline index is being selected , check permissions if ( ! CmsProject . ONLINE_PROJECT_NAME . equals ( getProject ( ) ) ) { // only if the user has the role Workplace user , he is allowed to access the Offline index try { OpenCms . getRoleManager ( ) . checkRole ( cms , CmsRole . ELEMENT_AUTHOR ) ; } catch ( CmsRoleViolationException e ) { throw new CmsSearchException ( Messages . get ( ) . container ( Messages . LOG_SOLR_ERR_SEARCH_PERMISSION_VIOLATION_2 , getName ( ) , cms . getRequestContext ( ) . getCurrentUser ( ) ) , e ) ; } }
public class ByteUtils { /** * Converts a byte array to a binary String . * @ param bytes * a byte array * @ param isMSB * true if MSB , false if LSB * @ return binary String */ public static String toBinaryString ( byte [ ] bytes , boolean isMSB ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( byte b : bytes ) { String binary = String . format ( "%8s" , Integer . toBinaryString ( b & 0xFF ) ) . replace ( ' ' , '0' ) ; if ( isMSB ) sb . append ( binary ) ; else sb . append ( new StringBuilder ( binary ) . reverse ( ) ) ; } return sb . toString ( ) ;
public class TaskCreator { /** * Set up cross - check feature . We provide an ' xcheck ' task which depends on a number of Test tasks that run the * unit tests compiled against every Checkstyle version against all the other Checkstyle libraries . In this way , we * find out which versions are compatible . */ public void setupCrossCheckTasks ( ) { } }
final TaskContainer tasks = project . getTasks ( ) ; final Task xtest = tasks . create ( XTEST_TASK_NAME ) ; xtest . setGroup ( XTEST_GROUP_NAME ) ; xtest . setDescription ( "Run the unit tests against all supported Checkstyle runtimes" ) ; tasks . getByName ( JavaBasePlugin . BUILD_TASK_NAME ) . dependsOn ( xtest ) ; for ( final DependencyConfig depConfig : buildUtil . getDepConfigs ( ) . getAll ( ) . values ( ) ) { final JavaVersion javaLevel = depConfig . getJavaLevel ( ) ; final String csBaseVersion = depConfig . getCheckstyleBaseVersion ( ) ; for ( final String csRuntimeVersion : depConfig . getCompatibleCheckstyleVersions ( ) ) { if ( csBaseVersion . equals ( csRuntimeVersion ) ) { continue ; } final TestTask testTask = tasks . create ( TaskNames . xtest . getName ( depConfig , csRuntimeVersion ) , TestTask . class ) ; testTask . configureFor ( depConfig , csRuntimeVersion ) ; testTask . setGroup ( XTEST_GROUP_NAME ) ; testTask . setDescription ( "Run the unit tests compiled for Checkstyle " + csBaseVersion + " against a Checkstyle " + csRuntimeVersion + " runtime (Java level: " + javaLevel + ")" ) ; testTask . getReports ( ) . getHtml ( ) . setEnabled ( false ) ; xtest . dependsOn ( testTask ) ; } }
public class SubmissionUtils { /** * Re - creates a document given the document and the reserved keys . * @ param doc Main document * @ param reserved Document that contains reserved keys . A reserve key starts with an underscore . In this document , * the reserved keys do not have the starting underscore . * @ return * @ throws Exception */ static public JSONObject recreateDocumentFromDocAndReserved ( JSONObject doc , JSONObject reserved ) throws Exception { } }
JSONObject result = JSONSupport . copyObject ( doc ) ; // Re - insert attributes that start with ' _ ' if ( null != reserved ) { Iterator < ? > it = reserved . keys ( ) ; while ( it . hasNext ( ) ) { Object keyObj = it . next ( ) ; if ( keyObj instanceof String ) { String key = ( String ) keyObj ; Object value = reserved . opt ( key ) ; result . put ( "_" + key , value ) ; } } } return result ;
public class Metadata { /** * Adds a new element to the specific element class . If no element class is found , then a new element class . will be * created . * @ param className * the class name * @ param classElement * the new element to be added . */ public void addClassElement ( final String className , final MetadataElement classElement ) { } }
classElement . setType ( getNamespaceValue ( classElement . getType ( ) ) ) ; if ( classElement . getMaxOccurs ( ) != null && ! classElement . getMaxOccurs ( ) . equals ( "1" ) ) { classElement . setMaxOccurs ( "unbounded" ) ; } for ( MetadataItem item : classList ) { if ( item . getName ( ) . equals ( className ) && item . getNamespace ( ) . equals ( getCurrentNamespace ( ) ) && item . getPackageApi ( ) . equals ( getCurrentPackageApi ( ) ) ) { // check for a element with the same name , if found then set ' maxOccurs = unbounded ' for ( MetadataElement element : item . getElements ( ) ) { if ( element . getName ( ) . equals ( classElement . getName ( ) ) && ! classElement . getIsAttribute ( ) ) { element . setMaxOccurs ( "unbounded" ) ; return ; } } item . getElements ( ) . add ( classElement ) ; return ; } } final MetadataItem newItem = new MetadataItem ( className ) ; newItem . getElements ( ) . add ( classElement ) ; newItem . setNamespace ( getCurrentNamespace ( ) ) ; newItem . setSchemaName ( getCurrentSchmema ( ) ) ; newItem . setPackageApi ( getCurrentPackageApi ( ) ) ; newItem . setPackageImpl ( getCurrentPackageImpl ( ) ) ; classList . add ( newItem ) ;
public class SipApplicationSessionImpl { /** * ( non - Javadoc ) * @ see javax . servlet . sip . SipApplicationSession # getTimers ( ) */ public Collection < ServletTimer > getTimers ( ) { } }
if ( ! isValid ( ) ) { throw new IllegalStateException ( "SipApplicationSession already invalidated !" ) ; } if ( servletTimers != null ) { return servletTimers . values ( ) ; } return new HashMap < String , ServletTimer > ( ) . values ( ) ;
public class StructureDiagramGenerator { /** * Returns the other atom of the bond . * Expects bond to have only two atoms . * Returns null if the given atom is not part of the given bond . * @ param atom the atom we already have * @ param bond the bond * @ return the other atom of the bond */ public IAtom getOtherBondAtom ( IAtom atom , IBond bond ) { } }
if ( ! bond . contains ( atom ) ) return null ; if ( bond . getBegin ( ) . equals ( atom ) ) return bond . getEnd ( ) ; else return bond . getBegin ( ) ;
public class SnapshotContentItem { /** * / * ( non - Javadoc ) * @ see java . util . Comparator # compare ( java . lang . Object , java . lang . Object ) */ @ Override public int compare ( SnapshotContentItem o1 , SnapshotContentItem o2 ) { } }
return o1 . contentId . compareTo ( o2 . contentId ) ;
public class RoleGraphEditingPlugin { /** * If startVertex is non - null , stretch an edge shape between * startVertex and the mouse pointer to simulate edge creation */ @ SuppressWarnings ( "unchecked" ) public void mouseDragged ( MouseEvent e ) { } }
if ( checkModifiers ( e ) ) { if ( startVertex != null ) { transformEdgeShape ( down , e . getPoint ( ) ) ; transformArrowShape ( down , e . getPoint ( ) ) ; } VisualizationViewer < String , String > vv = ( VisualizationViewer < String , String > ) e . getSource ( ) ; vv . repaint ( ) ; }
public class StringHelper { /** * Get the passed string element repeated for a certain number of times . Each * string element is simply appended at the end of the string . * @ param sElement * The string to get repeated . May not be < code > null < / code > . * @ param nRepeats * The number of repetitions to retrieve . May not be & lt ; 0. * @ return A non - < code > null < / code > string containing the string element for the * given number of times . */ @ Nonnull public static String getRepeated ( @ Nonnull final String sElement , @ Nonnegative final int nRepeats ) { } }
ValueEnforcer . notNull ( sElement , "Element" ) ; ValueEnforcer . isGE0 ( nRepeats , "Repeats" ) ; final int nElementLength = sElement . length ( ) ; // Check if result length would exceed int range if ( ( long ) nElementLength * nRepeats > Integer . MAX_VALUE ) throw new IllegalArgumentException ( "Resulting string exceeds the maximum integer length" ) ; if ( nElementLength == 0 || nRepeats == 0 ) return "" ; if ( nRepeats == 1 ) return sElement ; // use character version if ( nElementLength == 1 ) return getRepeated ( sElement . charAt ( 0 ) , nRepeats ) ; // combine via StringBuilder final StringBuilder ret = new StringBuilder ( nElementLength * nRepeats ) ; for ( int i = 0 ; i < nRepeats ; ++ i ) ret . append ( sElement ) ; return ret . toString ( ) ;
public class GeopaparazziSpatialiteCreator { /** * VARS DOCS END */ @ Execute public void process ( ) throws Exception { } }
// NOTE(review): the lines below are the implementation of process(). The flow is:
// validate inputs and defaults, list the *.shp files, open/initialize the Spatialite
// DB and its properties table, then per shapefile: import it through a virtual table,
// and — if a sidecar SLD exists — translate its rule(s) into Geopaparazzi styles
// (single rule → one style; multiple rules → a themed style keyed by equality
// filters), finally printing the resulting dataproperties rows. Left byte-identical:
// the DB/side-effect ordering and the style/theme branching are too intricate to
// restyle safely without the surrounding project.
checkNull ( inGeopaparazzi , inShapefilesFolder ) ; if ( pEncoding == null || pEncoding . trim ( ) . length ( ) == 0 ) { pEncoding = "UTF-8" ; } if ( pSizeFactor < 1 ) { pSizeFactor = 3 ; } if ( pLinesWidthFactor < 1 ) { pLinesWidthFactor = 6 ; } File shpFolder = new File ( inShapefilesFolder ) ; File [ ] shpfiles = shpFolder . listFiles ( new FilenameFilter ( ) { @ Override public boolean accept ( File dir , String name ) { return name . endsWith ( ".shp" ) ; } } ) ; if ( shpfiles . length == 0 ) { throw new ModelsIOException ( "The supplied folder doesn't contain any shapefile." , this ) ; } try ( ASpatialDb db = new GTSpatialiteThreadsafeDb ( ) ) { if ( ! db . open ( inGeopaparazzi ) ) { db . initSpatialMetadata ( null ) ; } if ( ! db . hasTable ( GeopaparazziDatabaseProperties . PROPERTIESTABLE ) ) { GeopaparazziDatabaseProperties . createPropertiesTable ( db ) ; } else { QueryResult qres1 = db . getTableRecordsMapFromRawSql ( "select * from dataproperties" , 10 ) ; pm . message ( "Dataproperties already existing: " ) ; for ( Object [ ] objs : qres1 . data ) { pm . message ( Arrays . toString ( objs ) ) ; } pm . message ( "----------------------------------" ) ; } pm . beginTask ( "Importing shapefiles..." , shpfiles . length ) ; for ( File shpFile : shpfiles ) { String name = FileUtilities . getNameWithoutExtention ( shpFile ) ; if ( db . hasTable ( name ) ) { pm . errorMessage ( "Table already existing: " + name ) ; continue ; } SimpleFeatureCollection fc = OmsVectorReader . readVector ( shpFile . getAbsolutePath ( ) ) ; SimpleFeatureType schema = fc . getSchema ( ) ; CoordinateReferenceSystem crs = schema . getCoordinateReferenceSystem ( ) ; String epsgStr = CrsUtilities . getCodeFromCrs ( crs ) ; String sirdStr = epsgStr . substring ( 5 ) ; int srid = Integer . parseInt ( sirdStr ) ; EGeometryType geomType = EGeometryType . forGeometryDescriptor ( schema . getGeometryDescriptor ( ) ) ; ESpatialiteGeometryType spatialiteGeometryType = geomType . 
toSpatialiteGeometryType ( ) ; HMImportExportUtils . importShapefileThroughVirtualTable ( db , name , shpFile . getAbsolutePath ( ) , pEncoding , srid , spatialiteGeometryType ) ; Style style = SldUtilities . getStyleFromFile ( shpFile ) ; if ( style != null ) { String uniqueName = "/#" + name + "#geometry" ; StyleWrapper styleWrapper = new StyleWrapper ( style ) ; List < FeatureTypeStyleWrapper > featureTypeStylesWrapperList = styleWrapper . getFeatureTypeStylesWrapperList ( ) ; if ( featureTypeStylesWrapperList . size ( ) > 0 ) { List < RuleWrapper > rulesWrapperList = new ArrayList < > ( ) ; for ( FeatureTypeStyleWrapper ftsWrapper : featureTypeStylesWrapperList ) { List < RuleWrapper > rulesWrappers = ftsWrapper . getRulesWrapperList ( ) ; rulesWrapperList . addAll ( rulesWrappers ) ; } if ( rulesWrapperList . size ( ) == 1 ) { RuleWrapper ruleWrapper = rulesWrapperList . get ( 0 ) ; SymbolizerWrapper geometrySymbolizersWrapper = ruleWrapper . getGeometrySymbolizersWrapper ( ) ; if ( geometrySymbolizersWrapper != null ) { org . hortonmachine . gears . io . geopaparazzi . styles . Style gpStyle = createBaseStyle ( db , uniqueName , rulesWrapperList ) ; populateStyleObject ( gpStyle , geometrySymbolizersWrapper ) ; GeopaparazziDatabaseProperties . updateStyle ( db , gpStyle ) ; } } else if ( rulesWrapperList . size ( ) > 1 ) { org . hortonmachine . gears . io . geopaparazzi . styles . Style gpStyle = createBaseStyle ( db , uniqueName , rulesWrapperList ) ; gpStyle . themeMap = new HashMap < > ( ) ; for ( RuleWrapper ruleWrapper : rulesWrapperList ) { SymbolizerWrapper geometrySymbolizersWrapper = ruleWrapper . getGeometrySymbolizersWrapper ( ) ; org . hortonmachine . gears . io . geopaparazzi . styles . Style themeStyle = createBaseStyle ( null , uniqueName , rulesWrapperList ) ; populateStyleObject ( themeStyle , geometrySymbolizersWrapper ) ; Filter filter = ruleWrapper . getRule ( ) . 
getFilter ( ) ; if ( filter instanceof IsEqualsToImpl ) { IsEqualsToImpl equalsFilter = ( IsEqualsToImpl ) filter ; Expression expression1 = equalsFilter . getExpression1 ( ) ; Expression expression2 = equalsFilter . getExpression2 ( ) ; setFilter ( gpStyle , themeStyle , expression1 ) ; setFilter ( gpStyle , themeStyle , expression2 ) ; } } GeopaparazziDatabaseProperties . updateStyle ( db , gpStyle ) ; } else { pm . errorMessage ( "Unable to export SLD for: " + shpFile ) ; continue ; } } } pm . worked ( 1 ) ; } pm . done ( ) ; QueryResult qres = db . getTableRecordsMapFromRawSql ( "select * from dataproperties" , 100 ) ; pm . message ( "Dataproperties inserted: " ) ; int theme = qres . names . indexOf ( ISpatialiteTableAndFieldsNames . THEME ) ; for ( Object [ ] objs : qres . data ) { String themeString = objs [ theme ] . toString ( ) . replaceAll ( "\\s+" , " " ) ; if ( themeString . length ( ) > 20 ) { objs [ theme ] = themeString . substring ( 0 , 15 ) + "..." ; } pm . message ( Arrays . toString ( objs ) ) ; } }
public class DataSourceCompactionConfig { /** * This method is copied from { @ code CompactionTask # getValidTargetCompactionSizeBytes } . The only difference is this * method doesn ' t check ' numShards ' which is not supported by { @ link UserCompactTuningConfig } . * Currently , we can ' t use the same method here because it ' s in a different module . Until we figure out how to reuse * the same method , this method must be synced with { @ code CompactionTask # getValidTargetCompactionSizeBytes } . */ @ Nullable private static Long getValidTargetCompactionSizeBytes ( @ Nullable Long targetCompactionSizeBytes , @ Nullable Integer maxRowsPerSegment , @ Nullable UserCompactTuningConfig tuningConfig ) { } }
if ( targetCompactionSizeBytes != null ) { Preconditions . checkArgument ( ! hasPartitionConfig ( maxRowsPerSegment , tuningConfig ) , "targetCompactionSizeBytes[%s] cannot be used with maxRowsPerSegment[%s] and maxTotalRows[%s]" , targetCompactionSizeBytes , maxRowsPerSegment , tuningConfig == null ? null : tuningConfig . getMaxTotalRows ( ) ) ; return targetCompactionSizeBytes ; } else { return hasPartitionConfig ( maxRowsPerSegment , tuningConfig ) ? null : DEFAULT_TARGET_COMPACTION_SIZE_BYTES ; }
public class LinkFactoryImpl { /** * Given a class , return the appropriate tool tip . * @ param typeElement the class to get the tool tip for . * @ return the tool tip for the appropriate class . */ private String getClassToolTip ( TypeElement typeElement , boolean isTypeLink ) { } }
Configuration configuration = m_writer . configuration ; Utils utils = configuration . utils ; if ( isTypeLink ) { return configuration . getText ( "doclet.Href_Type_Param_Title" , utils . getSimpleName ( typeElement ) ) ; } else if ( utils . isInterface ( typeElement ) ) { return configuration . getText ( "doclet.Href_Interface_Title" , utils . getPackageName ( utils . containingPackage ( typeElement ) ) ) ; } else if ( utils . isAnnotationType ( typeElement ) ) { return configuration . getText ( "doclet.Href_Annotation_Title" , utils . getPackageName ( utils . containingPackage ( typeElement ) ) ) ; } else if ( utils . isEnum ( typeElement ) ) { return configuration . getText ( "doclet.Href_Enum_Title" , utils . getPackageName ( utils . containingPackage ( typeElement ) ) ) ; } else { return configuration . getText ( "doclet.Href_Class_Title" , utils . getPackageName ( utils . containingPackage ( typeElement ) ) ) ; }
public class FactorGraph { /** * Gets a new { @ code FactorGraph } identical to this one , except with * every variable in { @ code varNumsToEliminate } marginalized out . * The returned { @ code FactorGraph } is defined on the variables in * { @ code this } , minus any of the passed - in variables . This * procedure performs variable elimination on each variable in the * order returned by the iterator over { @ code varNumsToEliminate } . * Choosing a good order ( i . e . , one with low treewidth ) can * dramatically improve the performance of this method . This method * is preferred if you wish to actively manipulate the returned * factor graph . If you simply want marginals , see * { @ link MarginalCalculator } . * @ param factor * @ return */ public FactorGraph marginalize ( Collection < Integer > varNumsToEliminate ) { } }
// NOTE(review): the line below is the implementation of marginalize(). Per
// eliminated variable it rebuilds the graph: copies over every other variable,
// multiplies together all factors mentioning the eliminated variable (keeping
// the name of the last such factor), marginalizes the product, and copies the
// untouched factors through. Left byte-identical: the copy-then-multiply-then-
// marginalize ordering and the immutable-rebuild pattern are easy to subtly
// break in a restyle. Also note the javadoc tags "@param factor"/"@return" are
// stale — the parameter is varNumsToEliminate.
FactorGraph currentFactorGraph = this ; for ( Integer eliminatedVariableIndex : varNumsToEliminate ) { // Each iteration marginalizes out a single variable from // currentFactorGraph , // aggregating intermediate results in nextFactorGraph . FactorGraph nextFactorGraph = new FactorGraph ( ) ; // Copy the variables in currentFactorGraph to nextFactorGraph for ( String variableName : currentFactorGraph . getVariables ( ) . getVariableNamesArray ( ) ) { int varIndex = currentFactorGraph . getVariables ( ) . getVariableByName ( variableName ) ; if ( varIndex != eliminatedVariableIndex ) { nextFactorGraph = nextFactorGraph . addVariableWithIndex ( variableName , currentFactorGraph . variables . getVariable ( varIndex ) , varIndex ) ; } } // Identify the factors which contain the variable , which must // be // multiplied together . All other factors can be immediately // copied into // the next factor graph . List < Factor > factorsToMultiply = Lists . newArrayList ( ) ; String mulName = null ; List < Factor > currentFactors = currentFactorGraph . getFactors ( ) ; List < String > currentFactorNames = currentFactorGraph . getFactorNames ( ) ; for ( int i = 0 ; i < currentFactors . size ( ) ; i ++ ) { Factor factor = currentFactors . get ( i ) ; if ( factor . getVars ( ) . contains ( eliminatedVariableIndex ) ) { factorsToMultiply . add ( factor ) ; mulName = currentFactorNames . get ( i ) ; } else { // No variable in factor is being eliminated , so we don ' t // have to // modify it . nextFactorGraph = nextFactorGraph . addFactor ( currentFactorNames . get ( i ) , factor ) ; } } if ( factorsToMultiply . size ( ) > 0 ) { // If the variable is present , eliminate it ! Factor productFactor = Factors . product ( factorsToMultiply ) ; nextFactorGraph = nextFactorGraph . addFactor ( mulName , productFactor . marginalize ( eliminatedVariableIndex ) ) ; } currentFactorGraph = nextFactorGraph ; } return currentFactorGraph ;
public class ModbusSerialMaster { /** * Disconnects this < tt > ModbusSerialMaster < / tt > from the slave . */ public void disconnect ( ) { } }
if ( connection != null && connection . isOpen ( ) ) { connection . close ( ) ; transaction = null ; setTransaction ( null ) ; }
public class Versions { /** * Returns all minor versions for the given major version or null * if major version does not exist . * @ param major * @ return */ public String [ ] getMinors ( String major ) { } }
prepareDetailedVersions ( ) ; if ( majors . containsKey ( major ) ) { return majors . get ( major ) . toArray ( new String [ ] { } ) ; } return null ;
public class IntentMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Intent intent , ProtocolMarshaller protocolMarshaller ) { } }
if ( intent == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( intent . getIntentName ( ) , INTENTNAME_BINDING ) ; protocolMarshaller . marshall ( intent . getIntentVersion ( ) , INTENTVERSION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Worker { /** * We could response to a node change by attempting to modify the driver in the current worker * however it is cleaner to bring this worker down gracefully , and let it restart elsewhere * ( non - Javadoc ) * @ see org . apache . zookeeper . Watcher # process ( org . apache . zookeeper . WatchedEvent ) */ @ Override public void process ( WatchedEvent event ) { } }
logger . debug ( "recived event " + event ) ; if ( event . getState ( ) == KeeperState . Expired || event . getState ( ) == KeeperState . Disconnected ) { driver . setGoOn ( false ) ; shutdown ( ) ; } if ( event . getType ( ) == EventType . NodeDataChanged || event . getType ( ) == EventType . NodeDeleted ) { driver . setGoOn ( false ) ; shutdown ( ) ; }
public class ResourceFinder { /** * Reads the contents of the found URLs as a Strings and returns them . * Individual URLs that cannot be read are skipped and added to the * list of ' resourcesNotLoaded ' * @ param uri * @ return a list of the content of each resource URL found * @ throws IOException if classLoader . getResources throws an exception */ public List < String > findAvailableStrings ( String uri ) throws IOException { } }
resourcesNotLoaded . clear ( ) ; String fulluri = path + uri ; List < String > strings = new ArrayList < > ( ) ; Enumeration < URL > resources = getResources ( fulluri ) ; while ( resources . hasMoreElements ( ) ) { URL url = resources . nextElement ( ) ; try { String string = readContents ( url ) ; strings . add ( string ) ; } catch ( IOException notAvailable ) { resourcesNotLoaded . add ( url . toExternalForm ( ) ) ; } } return strings ;
public class Table { /** * ignore the result or throw an ArrayIndexOutOfBound exception */ @ GuardedBy ( "lock" ) protected int computeRow ( long seqno ) { } }
int diff = ( int ) ( seqno - offset ) ; if ( diff < 0 ) return diff ; return diff / elements_per_row ;
public class StateId { /** * Convenience method for creating a list of { @ code StateId } s suitable for paging through a stored state . Each * { @ code StateId } will have the same state token as { @ code this } but with different index ranges . For example , with * a page size of 200 and a total results of 420 , the list will contain 3 entries with ranges of < tt > [ 0-199 ] < / tt > , * < tt > [ 200-399 ] < / tt > and < tt > [ 400-419 ] < / tt > . * @ param pageSize The size of each page * @ param totalResults The total number of results to generate pages for * @ return { @ code StateId } s that can be used to page through a stored state */ public List < StateId > pages ( final int pageSize , final int totalResults ) { } }
Validate . isTrue ( pageSize > 0 , "Page size must be positive: was " + pageSize ) ; Validate . isTrue ( totalResults > 0 , "Total results must be positive: was " + totalResults ) ; final List < StateId > pages = new ArrayList < StateId > ( totalResults / pageSize + 1 ) ; Range range = Range . first ( pageSize ) ; for ( ; range . getEnd ( ) < totalResults - 1 ; range = range . next ( ) ) { pages . add ( forRange ( range ) ) ; } // Though not strictly required , tidy up the last range to stop at the right upper bound pages . add ( forRange ( range . getStart ( ) , totalResults - 1 ) ) ; return Collections . unmodifiableList ( pages ) ;
public class LinkHandler { /** * Initialize non - persistent fields . These fields are common to both MS * reconstitution of DestinationHandlers and initial creation . * @ param messageProcessor the message processor instance * @ param durableSubscriptionsTable the topicspace durable subscriptions * HashMap from the DestinationManager . * @ param transaction the transaction to use for non persistent * initialization . Can be null , in which case an auto transaction * will be used . * @ throws MessageStoreException if there was an error interacting with the * Message Store . * @ throws SIStoreException if there was a transaction error . */ void initializeNonPersistent ( MessageProcessor messageProcessor , HashMap durableSubscriptionsTable , TransactionCommon transaction ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "initializeNonPersistent" , new Object [ ] { messageProcessor , durableSubscriptionsTable , transaction } ) ; super . initializeNonPersistent ( messageProcessor , durableSubscriptionsTable , transaction ) ; // Required to pick where to send messages too getLinkStateHandler ( ) . setLinkManager ( messageProcessor . getLinkManager ( ) ) ; // Create a destination definition with default attributes for use by this // link handler DestinationDefinition linkDestDefinition = messageProcessor . createDestinationDefinition ( DestinationType . QUEUE , _name ) ; // Set up a suitable qos linkDestDefinition . setMaxReliability ( Reliability . ASSURED_PERSISTENT ) ; linkDestDefinition . setDefaultReliability ( Reliability . ASSURED_PERSISTENT ) ; updateDefinition ( linkDestDefinition ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "initializeNonPersistent" ) ;
public class Ra10XmlGen { /** * Output xml * @ param def definition * @ param out Writer * @ throws IOException ioException */ @ Override public void writeXmlBody ( Definition def , Writer out ) throws IOException { } }
writeConnectorVersion ( out ) ; int indent = 1 ; writeIndent ( out , indent ) ; out . write ( "<display-name>Display Name</display-name>" ) ; writeEol ( out ) ; writeIndent ( out , indent ) ; out . write ( "<vendor-name>Red Hat Inc</vendor-name>" ) ; writeEol ( out ) ; writeIndent ( out , indent ) ; out . write ( "<spec-version>1.0</spec-version>" ) ; writeEol ( out ) ; writeIndent ( out , indent ) ; out . write ( "<eis-type>Test RA</eis-type>" ) ; writeEol ( out ) ; writeIndent ( out , indent ) ; out . write ( "<version>0.1</version>" ) ; writeEol ( out ) ; writeIndent ( out , indent ) ; out . write ( "<resourceadapter>" ) ; writeEol ( out ) ; writeOutbound ( def , out , indent + 1 ) ; writeIndent ( out , indent ) ; out . write ( "</resourceadapter>" ) ; writeEol ( out ) ; out . write ( "</connector>" ) ; writeEol ( out ) ;
public class NetUtil { /** * Creates a HTTP connection to the given URL . * @ param pURL the URL to get . * @ param pProperties connection properties . * @ param pFollowRedirects specifies whether we should follow redirects . * @ param pTimeout the specified timeout , in milliseconds . * @ return a HttpURLConnection * @ throws UnknownHostException if the hostname in the URL cannot be found . * @ throws IOException if an I / O exception occurs . */ public static HttpURLConnection createHttpURLConnection ( URL pURL , Properties pProperties , boolean pFollowRedirects , int pTimeout ) throws IOException { } }
// Open the connection , and get the stream HttpURLConnection conn ; if ( pTimeout > 0 ) { // Supports timeout conn = new com . twelvemonkeys . net . HttpURLConnection ( pURL , pTimeout ) ; } else { // Faster , more compatible conn = ( HttpURLConnection ) pURL . openConnection ( ) ; } // Set user agent if ( ( pProperties == null ) || ! pProperties . containsKey ( "User-Agent" ) ) { conn . setRequestProperty ( "User-Agent" , VERSION_ID + " (" + System . getProperty ( "os.name" ) + "/" + System . getProperty ( "os.version" ) + "; " + System . getProperty ( "os.arch" ) + "; " + System . getProperty ( "java.vm.name" ) + "/" + System . getProperty ( "java.vm.version" ) + ")" ) ; } // Set request properties if ( pProperties != null ) { for ( Map . Entry < Object , Object > entry : pProperties . entrySet ( ) ) { // Properties key / values can be safely cast to strings conn . setRequestProperty ( ( String ) entry . getKey ( ) , entry . getValue ( ) . toString ( ) ) ; } } try { // Breaks with JRE1.2? conn . setInstanceFollowRedirects ( pFollowRedirects ) ; } catch ( LinkageError le ) { // This is the best we can do . . . HttpURLConnection . setFollowRedirects ( pFollowRedirects ) ; System . err . println ( "You are using an old Java Spec, consider upgrading." ) ; System . err . println ( "java.net.HttpURLConnection.setInstanceFollowRedirects(" + pFollowRedirects + ") failed." ) ; // le . printStackTrace ( System . err ) ; } conn . setDoInput ( true ) ; conn . setDoOutput ( true ) ; // conn . setUseCaches ( true ) ; return conn ;
public class CachingFileAccess { /** * Reset the Cleanup size and interval * The size and interval when started are 500 items ( memory size unknown ) checked every minute in a background thread . * @ param size * @ param interval */ public void cleanupParams ( int size , long interval ) { } }
timer . cancel ( ) ; timer . schedule ( new Cleanup ( content , size ) , interval , interval ) ;
public class IndustryApi { /** * List character industry jobs List industry jobs placed by a character - - - * This route is cached for up to 300 seconds SSO Scope : * esi - industry . read _ character _ jobs . v1 * @ param characterId * An EVE character ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param includeCompleted * Whether to retrieve completed character industry jobs . Only * includes jobs from the past 90 days ( optional ) * @ param token * Access token to use if unable to set a header ( optional ) * @ return List & lt ; CharacterIndustryJobsResponse & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public List < CharacterIndustryJobsResponse > getCharactersCharacterIdIndustryJobs ( Integer characterId , String datasource , String ifNoneMatch , Boolean includeCompleted , String token ) throws ApiException { } }
ApiResponse < List < CharacterIndustryJobsResponse > > resp = getCharactersCharacterIdIndustryJobsWithHttpInfo ( characterId , datasource , ifNoneMatch , includeCompleted , token ) ; return resp . getData ( ) ;
public class IonWriterSystemBinary {
    /**
     * {@inheritDoc}
     * The {@link OutputStream} spec is mum regarding the behavior of flush on
     * a closed stream, so we shouldn't assume that our stream can handle that.
     */
    public final void flush() throws IOException {
        // Never flush after close(): behavior of a closed stream is unspecified.
        if (!_closed) {
            // Buffered data can only be written once the local symbol table can no
            // longer change: we must be at datagram (top) level with no pending
            // annotations, and the local symbol table must be frozen (read-only).
            if (atDatagramLevel() && !hasAnnotations()) {
                SymbolTable symtab = getSymbolTable();
                if (symtab != null && symtab.isReadOnly() && symtab.isLocalTable()) {
                    // It's no longer possible to add more symbols to the local
                    // symtab, so we can safely write everything out.
                    writeAllBufferedData();
                }
            }
            // Always propagate the flush to the user's underlying stream.
            _user_output_stream.flush();
        }
    }
}
public class DataSet { /** * Strips the data transform of all but the passed in labels * @ param labels strips the data transform of all but the passed in labels * @ return the dataset with only the specified labels */ @ Override public DataSet filterBy ( int [ ] labels ) { } }
List < DataSet > list = asList ( ) ; List < DataSet > newList = new ArrayList < > ( ) ; List < Integer > labelList = new ArrayList < > ( ) ; for ( int i : labels ) labelList . add ( i ) ; for ( DataSet d : list ) { int outcome = d . outcome ( ) ; if ( labelList . contains ( outcome ) ) { newList . add ( d ) ; } } return DataSet . merge ( newList ) ;
public class AbstractRunMojo { /** * Build a customized webapp in a directory , * applying a number of AMPs and / or JARs from alfresco maven plugin configuration . * @ param warName the name of the custom war * @ param modules the modules that should be applied to the custom war * @ param originalWarGroupId the Maven groupId for the original war file that should be customized * @ param originalWarArtifactId the Maven artifactId for the original war file that should be customized * @ param originalWarVersion the Maven version for the original war file that should be customized * @ throws MojoExecutionException when any problem appears building the custom war */ protected void buildCustomWarInDir ( String warName , List < ModuleDependency > modules , String originalWarGroupId , String originalWarArtifactId , String originalWarVersion ) throws MojoExecutionException { } }
final String warOutputDir = getWarOutputDir ( warName ) ; final String ampsModuleDir = "modules/" + warName + "/amps" ; final String ampsOutputDir = "${project.build.directory}/" + ampsModuleDir ; List < Element > ampModules = new ArrayList < > ( ) ; List < Element > jarModules = new ArrayList < > ( ) ; if ( modules != null && modules . size ( ) > 0 ) { for ( ModuleDependency moduleDep : modules ) { Element el = element ( name ( "artifactItem" ) , element ( name ( "groupId" ) , moduleDep . getGroupId ( ) ) , element ( name ( "artifactId" ) , moduleDep . getArtifactId ( ) ) , element ( name ( "version" ) , moduleDep . getVersion ( ) ) , element ( name ( "classifier" ) , moduleDep . getClassifier ( ) ) , element ( name ( "type" ) , moduleDep . getType ( ) ) , element ( name ( "overWrite" ) , "true" ) ) ; if ( moduleDep . getArtifactId ( ) . equalsIgnoreCase ( "alfresco-share-services" ) ) { // Skip if we are not running a 5.1 version of Alfresco , ' Alfresco Share Services ' // was not used in earlier versions if ( ! isPlatformVersionGtOrEqTo51 ( ) ) { getLog ( ) . info ( "Skipping alfresco-share-services, only needed for 5.1+" ) ; continue ; } } if ( moduleDep . isAmp ( ) ) { ampModules . add ( el ) ; } else if ( moduleDep . isJar ( ) ) { jarModules . add ( el ) ; } else { throw new MojoExecutionException ( "Unknown module type: " + moduleDep . getType ( ) + " when building custom " + warName + " war, only 'jar' and 'amp' types are allowed" ) ; } } } // Convert from list to array so we can add these elements below Element [ ] ampModuleArray = new Element [ ampModules . size ( ) ] ; ampModules . toArray ( ampModuleArray ) ; Element [ ] jarModuleArray = new Element [ jarModules . size ( ) ] ; jarModules . 
toArray ( jarModuleArray ) ; // Unpack the original war to / target / < warName > - war executeMojo ( plugin ( groupId ( "org.apache.maven.plugins" ) , artifactId ( "maven-dependency-plugin" ) , version ( MAVEN_DEPENDENCY_PLUGIN_VERSION ) ) , goal ( "unpack" ) , configuration ( element ( name ( "outputDirectory" ) , warOutputDir ) , element ( name ( "artifactItems" ) , element ( name ( "artifactItem" ) , element ( name ( "groupId" ) , originalWarGroupId ) , element ( name ( "artifactId" ) , originalWarArtifactId ) , element ( name ( "version" ) , originalWarVersion ) , element ( name ( "type" ) , "war" ) ) ) ) , execEnv ) ; if ( ampModuleArray . length > 0 ) { // Copy AMPs to target / modules / < warName > / amps so we can install them onto the WAR executeMojo ( plugin ( groupId ( "org.apache.maven.plugins" ) , artifactId ( "maven-dependency-plugin" ) , version ( MAVEN_DEPENDENCY_PLUGIN_VERSION ) ) , goal ( "copy" ) , configuration ( element ( name ( "outputDirectory" ) , ampsOutputDir ) , element ( name ( "artifactItems" ) , ampModuleArray ) ) , execEnv ) ; // Then apply all these amps to the unpacked war // Call the Alfresco Maven Plugin Install Mojo directly , so we don ' t have to keep SDK version info here String ampsLocation = project . getBuild ( ) . getDirectory ( ) + "/" + ampsModuleDir ; String warLocation = project . getBuild ( ) . getDirectory ( ) + "/" + getWarName ( warName ) ; InstallMojo installMojo = new InstallMojo ( ) ; installMojo . setAmpLocation ( new File ( ampsLocation ) ) ; installMojo . setWarLocation ( new File ( warLocation ) ) ; installMojo . setForce ( true ) ; try { installMojo . execute ( ) ; } catch ( MojoFailureException e ) { e . printStackTrace ( ) ; } } // Then copy all JAR dependencies to the unpacked war / target / < warName > - war / WEB - INF / lib if ( jarModuleArray . 
length > 0 ) { executeMojo ( plugin ( groupId ( "org.apache.maven.plugins" ) , artifactId ( "maven-dependency-plugin" ) , version ( MAVEN_DEPENDENCY_PLUGIN_VERSION ) ) , goal ( "copy" ) , configuration ( element ( name ( "outputDirectory" ) , warOutputDir + "/WEB-INF/lib" ) , element ( name ( "artifactItems" ) , jarModuleArray ) ) , execEnv ) ; }
public class ApiErrorExtractor { /** * Determine if a given GoogleJsonError is caused by , and only by , * account disabled error . */ public boolean accessDeniedNonRecoverable ( GoogleJsonError e ) { } }
ErrorInfo errorInfo = getErrorInfo ( e ) ; if ( errorInfo != null ) { String reason = errorInfo . getReason ( ) ; return ACCOUNT_DISABLED_REASON_CODE . equals ( reason ) || ACCESS_NOT_CONFIGURED_REASON_CODE . equals ( reason ) ; } return false ;
public class SimpleSourceFactory { /** * ( non - Javadoc ) * @ see com . sematext . ag . source . SourceFactory # create ( ) */ @ Override public Source create ( ) throws InitializationFailedException { } }
Source source = null ; try { source = sourceClass . newInstance ( ) ; } catch ( InstantiationException e ) { LOG . fatal ( "Creating source failed, " + SOURCE_CLASS_CONFIG_KEY + ": " + sourceClass , e ) ; System . exit ( 0 ) ; } catch ( IllegalAccessException e ) { LOG . fatal ( "Creating source failed, " + SOURCE_CLASS_CONFIG_KEY + ": " + sourceClass , e ) ; System . exit ( 0 ) ; } LOG . info ( "Initializing source..." ) ; source . init ( config ) ; LOG . info ( "Initializing source... DONE." ) ; return source ;
public class CredentialEncrypter { /** * Returns true if the provided bytes _ could _ be encrypted credentials , even if they can ' t be decrypted * by a specific instance . */ public static boolean isPotentiallyEncryptedBytes ( byte [ ] bytes ) { } }
checkNotNull ( bytes , "bytes" ) ; // The number of bytes is a non - zero multiple of the block size . try { return bytes . length != 0 && bytes . length % Cipher . getInstance ( CIPHER ) . getBlockSize ( ) == 0 ; } catch ( Throwable t ) { // This shouldn ' t happen since AES is supported by all JVMs . throw Throwables . propagate ( t ) ; }
public class NrqlAlertConditionService { /** * Updates the given NRQL alert condition . * @ param condition The alert condition to update * @ return The alert condition that was updated */ public Optional < NrqlAlertCondition > update ( NrqlAlertCondition condition ) { } }
return HTTP . PUT ( String . format ( "/v2/alerts_nrql_conditions/%d.json" , condition . getId ( ) ) , condition , NRQL_ALERT_CONDITION ) ;
public class PactDslWithProvider { /** * Description of the request that is expected to be received * @ param description request description */ public PactDslRequestWithoutPath uponReceiving ( String description ) { } }
return new PactDslWithState ( consumerPactBuilder , consumerPactBuilder . getConsumerName ( ) , providerName , defaultRequestValues , defaultResponseValues ) . uponReceiving ( description ) ;
public class ApiClientMgr {
    /**
     * Re-creates the HuaweiApiClient.
     * A new client is needed in two cases: (1) first use, and (2) the existing
     * client's state has become inconsistent.
     *
     * @return the newly created client, or null if HMSAgent has not been initialized
     */
    private HuaweiApiClient resetApiClient() {
        // Without an application context we cannot build a client at all.
        if (context == null) {
            HMSAgentLog.e("HMSAgent not init");
            return null;
        }
        synchronized (APICLIENT_LOCK) {
            if (apiClient != null) {
                // Keep the old apiClient alive for one more minute before discarding
                // it, to avoid disconnecting while a caller is still using it.
                disConnectClientDelay(apiClient, 60000);
            }
            HMSAgentLog.d("reset client");
            // Resetting the client this way can, in extreme cases, cause both clients
            // to deliver callbacks — i.e. rstCode == 0 while the client is invalid.
            // The wrappers around business calls all retry once, so this is acceptable.
            HuaweiIdSignInOptions signInOptions = new HuaweiIdSignInOptions.Builder(
                    HuaweiIdSignInOptions.DEFAULT_SIGN_IN)
                    .requestAccessToken()
                    .requestOpenId()
                    .requestUnionId()
                    .build();
            apiClient = new HuaweiApiClient.Builder(context)
                    .addApi(HuaweiPay.PAY_API)
                    .addApi(HuaweiGame.GAME_API)
                    .addApi(HuaweiSns.API)
                    .addApi(HuaweiId.SIGN_IN_API, signInOptions)
                    .addApi(HuaweiPush.PUSH_API)
                    .addConnectionCallbacks(INST)
                    .addOnConnectionFailedListener(INST)
                    .build();
            return apiClient;
        }
    }
}
public class HttpJsonSerializer { /** * Parses a single Tree object * < b > Note : < / b > Incoming data is a hash map of strings instead of directly * deserializing to a tree . We do it this way because we don ' t want users * messing with the timestamp fields . * @ return A parsed Tree * @ throws JSONException if parsing failed * @ throws BadRequestException if the content was missing or parsing failed */ public Tree parseTreeV1 ( ) { } }
final String json = query . getContent ( ) ; if ( json == null || json . isEmpty ( ) ) { throw new BadRequestException ( HttpResponseStatus . BAD_REQUEST , "Missing message content" , "Supply valid JSON formatted data in the body of your request" ) ; } try { final HashMap < String , String > properties = JSON . parseToObject ( json , TR_HASH_MAP ) ; final Tree tree = new Tree ( ) ; for ( Map . Entry < String , String > entry : properties . entrySet ( ) ) { // skip nulls , empty is fine , but nulls are not welcome here if ( entry . getValue ( ) == null ) { continue ; } if ( entry . getKey ( ) . toLowerCase ( ) . equals ( "treeid" ) ) { tree . setTreeId ( Integer . parseInt ( entry . getValue ( ) ) ) ; } else if ( entry . getKey ( ) . toLowerCase ( ) . equals ( "name" ) ) { tree . setName ( entry . getValue ( ) ) ; } else if ( entry . getKey ( ) . toLowerCase ( ) . equals ( "description" ) ) { tree . setDescription ( entry . getValue ( ) ) ; } else if ( entry . getKey ( ) . toLowerCase ( ) . equals ( "notes" ) ) { tree . setNotes ( entry . getValue ( ) ) ; } else if ( entry . getKey ( ) . toLowerCase ( ) . equals ( "enabled" ) ) { if ( entry . getValue ( ) . toLowerCase ( ) . equals ( "true" ) ) { tree . setEnabled ( true ) ; } else { tree . setEnabled ( false ) ; } } else if ( entry . getKey ( ) . toLowerCase ( ) . equals ( "strictmatch" ) ) { if ( entry . getValue ( ) . toLowerCase ( ) . equals ( "true" ) ) { tree . setStrictMatch ( true ) ; } else { tree . setStrictMatch ( false ) ; } } else if ( entry . getKey ( ) . toLowerCase ( ) . equals ( "storefailures" ) ) { if ( entry . getValue ( ) . toLowerCase ( ) . equals ( "true" ) ) { tree . setStoreFailures ( true ) ; } else { tree . setStoreFailures ( false ) ; } } } return tree ; } catch ( NumberFormatException nfe ) { throw new BadRequestException ( "Unable to parse 'tree' value" ) ; } catch ( IllegalArgumentException iae ) { throw new BadRequestException ( "Unable to parse the given JSON" , iae ) ; }
public class ArrayListBag { @ SuppressWarnings ( "unchecked" ) @ Override public void bag ( ArrayList < Object > unBaggedObject ) { } }
if ( unBaggedObject == null || unBaggedObject . isEmpty ( ) ) return ; arrayObj = new Object [ unBaggedObject . size ( ) ] ; Object obj = null ; for ( int i = 0 ; i < arrayObj . length ; i ++ ) { obj = unBaggedObject . get ( i ) ; if ( obj instanceof Date ) obj = new DateBag ( ( Date ) obj ) ; else if ( obj instanceof ArrayList ) obj = new ArrayListBag ( ( ArrayList < Object > ) obj ) ; arrayObj [ i ] = obj ; }
public class AmazonWorkspacesClient {
    /**
     * Deletes the specified image from your account. To delete an image, you must first delete any bundles that are
     * associated with the image.
     *
     * @param deleteWorkspaceImageRequest
     * @return Result of the DeleteWorkspaceImage operation returned by the service.
     * @throws ResourceAssociatedException
     *         The resource is associated with a directory.
     * @throws InvalidResourceStateException
     *         The state of the resource is not valid for this operation.
     * @throws AccessDeniedException
     *         The user is not authorized to access a resource.
     * @sample AmazonWorkspaces.DeleteWorkspaceImage
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workspaces-2015-04-08/DeleteWorkspaceImage"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public DeleteWorkspaceImageResult deleteWorkspaceImage(DeleteWorkspaceImageRequest request) {
        // Run client-side request handlers/mutations first, then execute the call.
        request = beforeClientExecution(request);
        return executeDeleteWorkspaceImage(request);
    }
}
public class CmsSite { /** * Gets all site matchers which should be used for matching the site . < p > * @ return all site matchers to be used for matching the site */ public List < CmsSiteMatcher > getAllMatchers ( ) { } }
List < CmsSiteMatcher > result = Lists . newArrayList ( ) ; switch ( getSSLMode ( ) ) { case LETS_ENCRYPT : case MANUAL_EP_TERMINATION : List < CmsSiteMatcher > baseMatchers = Lists . newArrayList ( ) ; baseMatchers . add ( m_siteMatcher ) ; for ( CmsSiteMatcher alias : m_aliases ) { baseMatchers . add ( alias ) ; } // For each matcher , compute both a https and http variant . // Store them in a map , so we don ' t get duplicates if the variants have both // been manually defined Map < String , CmsSiteMatcher > matchersByUrl = Maps . newHashMap ( ) ; for ( CmsSiteMatcher matcher : baseMatchers ) { CmsSiteMatcher httpMatcher = matcher . forDifferentScheme ( "http" ) ; CmsSiteMatcher httpsMatcher = matcher . forDifferentScheme ( "https" ) ; for ( CmsSiteMatcher current : new CmsSiteMatcher [ ] { httpMatcher , httpsMatcher } ) { matchersByUrl . put ( current . getUrl ( ) , current ) ; } } return Lists . newArrayList ( matchersByUrl . values ( ) ) ; case NO : case SECURE_SERVER : case MANUAL : default : result = Lists . newArrayList ( ) ; result . add ( m_siteMatcher ) ; for ( CmsSiteMatcher alias : m_aliases ) { result . add ( alias ) ; } if ( m_secureServer != null ) { result . add ( m_secureServer ) ; } return result ; }
public class SimonVisitors { /** * Visit Simons recursively as a tree starting from the specified Simon . * @ param simon Parent simon * @ param visitor Visitor * @ throws IOException */ public static void visitTree ( Simon simon , SimonVisitor visitor ) throws IOException { } }
visitor . visit ( simon ) ; for ( Simon childSimon : simon . getChildren ( ) ) { visitTree ( childSimon , visitor ) ; }
public class HTTPFaxClientSpi {
    /**
     * This function will submit a new fax job.<br>
     * The fax job ID may be populated by this method in the provided fax job object.
     *
     * @param faxJob The fax job object containing the needed information
     */
    @Override
    protected void submitFaxJobImpl(FaxJob faxJob) {
        // Build the HTTP request for this job and send it as a submit action.
        HTTPRequest request = this.createSubmitFaxJobHTTPRequest(faxJob);
        this.submitHTTPRequest(faxJob, request, FaxActionType.SUBMIT_FAX_JOB);
    }
}
public class PolicyViolationError {
    /**
     * Gets the violatingParts value for this PolicyViolationError.
     *
     * @return violatingParts
     *         Lists the parts that violate the policy.
     */
    public com.google.api.ads.adwords.axis.v201809.rm.PolicyViolationErrorPart[] getViolatingParts() {
        // Simple accessor: returns the backing array directly (no defensive copy).
        return violatingParts;
    }
}
public class GetUploadRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param getUploadRequest the request to marshal; must not be null
     * @param protocolMarshaller the protocol-level marshaller to write fields into
     */
    public void marshall(GetUploadRequest getUploadRequest, ProtocolMarshaller protocolMarshaller) {
        if (getUploadRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // The only field carried by this request is the upload ARN.
            protocolMarshaller.marshall(getUploadRequest.getArn(), ARN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client-side exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GroovyMBean { /** * Description of the specified attribute name . * @ param attr - the attribute * @ return String the description */ protected String describeAttribute ( MBeanAttributeInfo attr ) { } }
StringBuilder buf = new StringBuilder ( ) ; buf . append ( "(" ) ; if ( attr . isReadable ( ) ) { buf . append ( "r" ) ; } if ( attr . isWritable ( ) ) { buf . append ( "w" ) ; } buf . append ( ") " ) . append ( attr . getType ( ) ) . append ( " " ) . append ( attr . getName ( ) ) ; return buf . toString ( ) ;
public class Checks { /** * Checks that passed reference is not null , * throws { @ link NullPointerException } with passed message if reference is null * @ param object to check * @ param message exception message if object is null */ public static void checkNotNull ( @ Nullable Object object , @ NonNull String message ) { } }
if ( object == null ) { throw new NullPointerException ( message ) ; }
public class AfplibPackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the {@link EEnum} for MediaEjectControlReserved, resolved lazily
     * from the globally registered AFPLIB package on first access.
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getMediaEjectControlReserved() {
        // Lazily look up the enum in the EMF global package registry; index 92 is
        // the generated classifier position of MediaEjectControlReserved.
        if (mediaEjectControlReservedEEnum == null) {
            mediaEjectControlReservedEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(92);
        }
        return mediaEjectControlReservedEEnum;
    }
}
public class InventoryNavigator { /** * Get the ManagedObjectReference for an item under the * specified parent node that has the type and name specified . * @ param type type of the managed object * @ param name name to match * @ return First ManagedEntity object of the type / name pair found * @ throws RemoteException * @ throws RuntimeFault * @ throws InvalidProperty */ public ManagedEntity searchManagedEntity ( String type , String name ) throws InvalidProperty , RuntimeFault , RemoteException { } }
if ( name == null || name . length ( ) == 0 ) { return null ; } if ( type == null ) { type = "ManagedEntity" ; } String [ ] [ ] typeinfo = new String [ ] [ ] { new String [ ] { type , "name" , } , } ; ObjectContent [ ] ocs = retrieveObjectContents ( typeinfo , true ) ; if ( ocs == null || ocs . length == 0 ) { return null ; } for ( int i = 0 ; i < ocs . length ; i ++ ) { DynamicProperty [ ] propSet = ocs [ i ] . getPropSet ( ) ; if ( propSet . length > 0 ) { String nameInPropSet = ( String ) propSet [ 0 ] . getVal ( ) ; if ( name . equalsIgnoreCase ( nameInPropSet ) ) { ManagedObjectReference mor = ocs [ i ] . getObj ( ) ; return MorUtil . createExactManagedEntity ( rootEntity . getServerConnection ( ) , mor ) ; } } } return null ;
public class ModelsImpl { /** * Deletes an intent classifier from the application . * @ param appId The application ID . * @ param versionId The version ID . * @ param intentId The intent classifier ID . * @ param deleteIntentOptionalParameter the object representing the optional parameters to be set before calling this API * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < OperationStatus > deleteIntentAsync ( UUID appId , String versionId , UUID intentId , DeleteIntentOptionalParameter deleteIntentOptionalParameter , final ServiceCallback < OperationStatus > serviceCallback ) { } }
return ServiceFuture . fromResponse ( deleteIntentWithServiceResponseAsync ( appId , versionId , intentId , deleteIntentOptionalParameter ) , serviceCallback ) ;
public class OntopOptimizationConfigurationImpl { /** * To be overloaded */ @ Override protected Stream < Module > buildGuiceModules ( ) { } }
return Stream . concat ( super . buildGuiceModules ( ) , Stream . of ( new OntopOptimizationModule ( this ) ) ) ;
public class CmsShellCommands { /** * Changes the access control for a given resource and a given principal ( user / group ) . * @ param resourceName name of the resource * @ param principalType the type of the principal ( group or user ) * @ param principalName name of the principal * @ param permissionString the permissions in the format ( ( + | - ) ( r | w | v | c | i ) ) * * @ throws CmsException if something goes wrong * @ see CmsObject # chacc ( String , String , String , String ) */ public void chacc ( String resourceName , String principalType , String principalName , String permissionString ) throws CmsException { } }
m_cms . lockResource ( resourceName ) ; if ( I_CmsPrincipal . PRINCIPAL_GROUP . equalsIgnoreCase ( principalType . trim ( ) ) ) { principalName = OpenCms . getImportExportManager ( ) . translateGroup ( principalName ) ; } else { principalName = OpenCms . getImportExportManager ( ) . translateUser ( principalName ) ; } m_cms . chacc ( resourceName , principalType , principalName , permissionString ) ;
public class Flowable {
    /**
     * Converts a {@link Future} into a Publisher.
     * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/from.Future.png" alt="">
     * You can convert any object that supports the {@link Future} interface into a Publisher that emits the
     * return value of the {@link Future#get} method of that object by passing the object into the {@code from}
     * method.
     * <em>Important note:</em> This Publisher is blocking on the thread it gets subscribed on; you cannot cancel it.
     * Unlike 1.x, canceling the Flowable won't cancel the future. If necessary, one can use composition to achieve the
     * cancellation effect: {@code futurePublisher.doOnCancel(() -> future.cancel(true));}.
     * <dl>
     * <dt><b>Backpressure:</b></dt>
     * <dd>The operator honors backpressure from downstream.</dd>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code fromFuture} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param future
     *            the source {@link Future}
     * @param <T>
     *            the type of object that the {@link Future} returns, and also the type of item to be emitted by
     *            the resulting Publisher
     * @return a Flowable that emits the item from the source {@link Future}
     * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a>
     */
    @CheckReturnValue
    @BackpressureSupport(BackpressureKind.FULL)
    @SchedulerSupport(SchedulerSupport.NONE)
    public static <T> Flowable<T> fromFuture(Future<? extends T> future) {
        ObjectHelper.requireNonNull(future, "future is null");
        // timeout = 0L with a null TimeUnit selects the untimed variant: the
        // operator will block on Future.get() with no timeout.
        return RxJavaPlugins.onAssembly(new FlowableFromFuture<T>(future, 0L, null));
    }
}
public class OMMapBufferEntry {
    /**
     * Flushes the memory mapped buffer to disk only if it's dirty.
     *
     * @return true if the buffer has been successfully flushed, otherwise false.
     */
    boolean flush() {
        // Fast path: nothing to write when the buffer is clean.
        if (!dirty)
            return true;
        acquireExclusiveLock();
        try {
            final long timer = OProfiler.getInstance().startChrono();
            // FORCE THE WRITE OF THE BUFFER: force() can fail transiently (e.g. under
            // memory pressure), so retry up to FORCE_RETRY times, asking the memory
            // watchdog to free memory between attempts.
            for (int i = 0; i < FORCE_RETRY; ++i) {
                try {
                    buffer.force();
                    dirty = false;
                    break;
                } catch (Exception e) {
                    OLogManager.instance().debug(this,
                            "Cannot write memory buffer to disk. Retrying (" + (i + 1) + "/" + FORCE_RETRY + ")...");
                    OMemoryWatchDog.freeMemory(FORCE_DELAY);
                }
            }
            // Record the outcome: either log the persistent failure or count the commit.
            if (dirty)
                OLogManager.instance().debug(this, "Cannot commit memory buffer to disk after %d retries", FORCE_RETRY);
            else
                OProfiler.getInstance().updateCounter("system.file.mmap.pagesCommitted", 1);
            OProfiler.getInstance().stopChrono("system.file.mmap.commitPages", timer);
            // Success iff the dirty flag was cleared by a successful force().
            return !dirty;
        } finally {
            releaseExclusiveLock();
        }
    }
}
public class KeyRangeIterable { /** * Convenience function to add the correct { @ link StackedIterator } to the key * range Iterator stack . * This method is used to add a < em > single < / em > < em > projected < / em > constraint * to the stack . */ private static Iterator < MarkerRange . Builder > addProjected ( Predicate projected , String name , Iterator < MarkerRange . Builder > inner ) { } }
if ( projected instanceof In ) { return new SetIterator ( ( In ) projected , name , inner ) ; } else if ( projected instanceof Range ) { return new RangeIterator ( name , ( Range ) projected , inner ) ; } else { return inner ; }
public class TokenUtils { /** * Read a PEM encoded public key from the classpath * @ param pemResName - key file resource name * @ return PublicKey * @ throws Exception on decode failure */ public static PublicKey readPublicKey ( String pemResName ) throws Exception { } }
InputStream contentIS = TokenUtils . class . getResourceAsStream ( pemResName ) ; byte [ ] tmp = new byte [ 4096 ] ; int length = contentIS . read ( tmp ) ; PublicKey publicKey = decodePublicKey ( new String ( tmp , 0 , length ) ) ; return publicKey ;
public class SubscriptionService {
    /**
     * Changes the amount of a subscription.<br>
     * <br>
     * The new amount is valid one-time only, after which the original subscription amount will be charged again. If you
     * want to permanently change the amount use {@link SubscriptionService#changeAmount(String, Integer)}.
     *
     * @param subscription the subscription.
     * @param amount the new amount.
     * @return the updated subscription.
     */
    public Subscription changeAmountTemporary(Subscription subscription, Integer amount) {
        // Delegates with type=0 and null currency/interval — presumably 0 selects the
        // one-time ("temporary") change and nulls leave currency/interval unchanged.
        // NOTE(review): confirm argument semantics against changeAmount(...).
        return changeAmount(subscription, amount, 0, null, null);
    }
}
public class ParosTableContext { /** * / * ( non - Javadoc ) * @ see org . parosproxy . paros . db . paros . TableContext # getDataForContext ( int ) */ @ Override public synchronized List < RecordContext > getDataForContext ( int contextId ) throws DatabaseException { } }
try { List < RecordContext > result = new ArrayList < > ( ) ; psGetAllDataForContext . setInt ( 1 , contextId ) ; try ( ResultSet rs = psGetAllDataForContext . executeQuery ( ) ) { while ( rs . next ( ) ) { result . add ( new RecordContext ( rs . getLong ( DATAID ) , rs . getInt ( CONTEXTID ) , rs . getInt ( TYPE ) , rs . getString ( DATA ) ) ) ; } } return result ; } catch ( SQLException e ) { throw new DatabaseException ( e ) ; }
public class UserManager { /** * Get the user with the specified email address * @ param sEmailAddress * The email address to be checked . May be < code > null < / code > . * @ return < code > null < / code > if no such user exists * @ see # getUserOfEmailAddressIgnoreCase ( String ) */ @ Nullable public IUser getUserOfEmailAddress ( @ Nullable final String sEmailAddress ) { } }
if ( StringHelper . hasNoText ( sEmailAddress ) ) return null ; return findFirst ( x -> sEmailAddress . equals ( x . getEmailAddress ( ) ) ) ;
public class GetInventoryResult {
    /**
     * Collection of inventory entities such as a collection of instance inventory.
     *
     * @return Collection of inventory entities such as a collection of instance inventory.
     */
    public java.util.List<InventoryResultEntity> getEntities() {
        // Lazily create the backing list so this getter never returns null.
        if (entities == null) {
            entities = new com.amazonaws.internal.SdkInternalList<InventoryResultEntity>();
        }
        return entities;
    }
}
public class Heritrix3Wrapper { /** * Returns the job state object given a jobname . * @ param jobname job name * @ return job state */ public JobResult job ( String jobname ) { } }
HttpGet getRequest = new HttpGet ( baseUrl + "job/" + jobname ) ; getRequest . addHeader ( "Accept" , "application/xml" ) ; return jobResult ( getRequest ) ;
public class CommerceTaxFixedRatePersistenceImpl {
    /**
     * Removes all the commerce tax fixed rates where commerceTaxMethodId = &#63; from the database.
     *
     * @param commerceTaxMethodId the commerce tax method ID
     */
    @Override
    public void removeByCommerceTaxMethodId(long commerceTaxMethodId) {
        // Fetch every matching row (ALL_POS bounds disable paging) and delete each
        // one through remove() so model listeners and caches stay consistent.
        for (CommerceTaxFixedRate commerceTaxFixedRate : findByCommerceTaxMethodId(
                commerceTaxMethodId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
            remove(commerceTaxFixedRate);
        }
    }
}
public class ThriftDataHandler {
    /**
     * Populate entity from slice.
     *
     * @param m the entity metadata
     * @param relationNames the relation names
     * @param isWrapReq whether the result should be wrapped
     * @param e the entity instance to populate
     * @param columnOrSuperColumnsFromRow map from serialized row key to its columns
     * @return the populated entity
     * @throws CharacterCodingException the character coding exception
     */
    private Object populateEntityFromSlice(EntityMetadata m, List<String> relationNames, boolean isWrapReq,
            Object e, Map<ByteBuffer, List<ColumnOrSuperColumn>> columnOrSuperColumnsFromRow)
            throws CharacterCodingException {
        ThriftDataResultHelper dataGenerator = new ThriftDataResultHelper();
        // Each key in the map is a serialized row id; rebuild the thrift row for each.
        for (ByteBuffer key : columnOrSuperColumnsFromRow.keySet()) {
            // Decode the raw row key back into the entity's id type.
            Object id = PropertyAccessorHelper.getObject(m.getIdAttribute().getJavaType(), key.array());
            ThriftRow tr = new ThriftRow();
            tr.setColumnFamilyName(m.getTableName());
            tr.setId(id);
            // Translate the raw thrift columns into the row, honoring counter-column tables.
            tr = dataGenerator.translateToThriftRow(columnOrSuperColumnsFromRow, m.isCounterColumnType(), m.getType(), tr);
            // NOTE(review): e is reassigned on every iteration; with multiple rows only
            // the final populated state is returned — confirm this is intended.
            e = populateEntity(tr, m, e, relationNames, isWrapReq);
        }
        return e;
    }
}
public class AbstractWebSink { /** * a convenience method for creating a metrics cache */ < K , V > Cache < K , V > createCache ( ) { } }
return CacheBuilder . newBuilder ( ) . maximumSize ( cacheMaxSize ) . expireAfterWrite ( cacheTtlSeconds , TimeUnit . SECONDS ) . ticker ( cacheTicker ) . build ( ) ;
public class LoadPermissionModifications { /** * The load permissions to add . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAdd ( java . util . Collection ) } or { @ link # withAdd ( java . util . Collection ) } if you want to override the * existing values . * @ param add * The load permissions to add . * @ return Returns a reference to this object so that method calls can be chained together . */ public LoadPermissionModifications withAdd ( LoadPermissionRequest ... add ) { } }
if ( this . add == null ) { setAdd ( new com . amazonaws . internal . SdkInternalList < LoadPermissionRequest > ( add . length ) ) ; } for ( LoadPermissionRequest ele : add ) { this . add . add ( ele ) ; } return this ;
public class DescribeResourceRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeResourceRequest describeResourceRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeResourceRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeResourceRequest . getOrganizationId ( ) , ORGANIZATIONID_BINDING ) ; protocolMarshaller . marshall ( describeResourceRequest . getResourceId ( ) , RESOURCEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FailureInfoMarshaller { /** * Marshall the given parameter object . */ public void marshall ( FailureInfo failureInfo , ProtocolMarshaller protocolMarshaller ) { } }
if ( failureInfo == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( failureInfo . getStatusCode ( ) , STATUSCODE_BINDING ) ; protocolMarshaller . marshall ( failureInfo . getErrorCode ( ) , ERRORCODE_BINDING ) ; protocolMarshaller . marshall ( failureInfo . getErrorMessage ( ) , ERRORMESSAGE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Queue { /** * Release reserved message after specified time . If there is no message with such id on the queue , an * EmptyQueueException is thrown . * @ param message The message to release . * @ param delay The time after which the message will be released . * @ throws io . iron . ironmq . HTTPException If the IronMQ service returns a status other than 200 OK . * @ throws java . io . IOException If there is an error accessing the IronMQ server . */ public void releaseMessage ( Message message , int delay ) throws IOException { } }
releaseMessage ( message . getId ( ) , message . getReservationId ( ) , new Long ( delay ) ) ;
public class Boxing { /** * Transforms an array of { @ code Boolean } , { @ code Character } , or { @ code Number } * into a primitive array . * @ param src source array * @ param srcPos start position * @ param len length * @ return primitive array */ public static Object unboxAll ( Object [ ] src , int srcPos , int len ) { } }
if ( srcPos >= src . length ) { throw new IndexOutOfBoundsException ( String . valueOf ( srcPos ) ) ; } Class < ? > type = src [ srcPos ] . getClass ( ) ; return unboxAll ( type , src , srcPos , len ) ;
public class DataCubeAPI { /** * 拉取会员卡数据 < br > * 1 . 查询时间区间需 & lt ; = 62天 , 否则报错 ; < br > * 2 . 传入时间格式需严格参照示例填写如 ” 2015-06-15 ” , 否则报错 ; < br > * 3 . 该接口只能拉取非当天的数据 , 不能拉取当天的卡券数据 , 否则报错 。 < br > * @ param access _ token access _ token * @ param memberCardCube memberCardCube * @ return result */ public static MemberCardInfoResult getCardMemberCardInfo ( String access_token , MemberCardInfo memberCardCube ) { } }
return getCardMemberCardInfo ( access_token , JsonUtil . toJSONString ( memberCardCube ) ) ;
public class JavaClasspathParser { /** * Reads entry of a . classpath file . * @ param projectName * - the name of project containing the . classpath file * @ param projectRootAbsoluteFullPath * - the path to project containing the . classpath file * @ param unknownElements * - map of unknow elements * @ return the set of CLasspath Entries extracted from the . classpath * @ throws CoreException * - exception during parsing of . classpath * @ throws IOException * - exception during parsing of . classpath * @ throws ClasspathEntry . AssertionFailedException * - exception during parsing of . classpath * @ throws URISyntaxException * - exception during parsing of . classpath */ @ SuppressWarnings ( "checkstyle:innerassignment" ) public static IClasspathEntry [ ] [ ] readFileEntriesWithException ( String projectName , URL projectRootAbsoluteFullPath , Map < IPath , UnknownXmlElements > unknownElements ) throws CoreException , IOException , ClasspathEntry . AssertionFailedException , URISyntaxException { } }
final URL rscFile = new URL ( projectRootAbsoluteFullPath . toExternalForm ( ) . concat ( JavaProject . CLASSPATH_FILENAME ) ) ; byte [ ] bytes ; // when a project is imported , we get a first delta for the addition of the . project , but the . classpath is not accessible // so default to using java . io . File // see https : / / bugs . eclipse . org / bugs / show _ bug . cgi ? id = 96258 final URI location ; try { location = rscFile . toURI ( ) ; } catch ( URISyntaxException e ) { throw e ; } if ( location == null ) { throw new IOException ( "Cannot obtain a location URI for " + rscFile ) ; // $ NON - NLS - 1 $ } final File file = Util . toLocalFile ( location , null /* no progress monitor available */ ) ; if ( file == null ) { throw new IOException ( "Unable to fetch file from " + location ) ; // $ NON - NLS - 1 $ } try { bytes = org . eclipse . jdt . internal . compiler . util . Util . getFileByteContent ( file ) ; } catch ( IOException e ) { throw e ; } if ( hasUTF8BOM ( bytes ) ) { // see https : / / bugs . eclipse . org / bugs / show _ bug . cgi ? id = 240034 final int length = bytes . length - IContentDescription . BOM_UTF_8 . length ; System . arraycopy ( bytes , IContentDescription . BOM_UTF_8 . length , bytes = new byte [ length ] , 0 , length ) ; } String xmlClasspath ; try { // . classpath always encoded with UTF - 8 xmlClasspath = new String ( bytes , org . eclipse . jdt . internal . compiler . util . Util . UTF_8 ) ; } catch ( UnsupportedEncodingException e ) { Util . log ( e , "Could not read .classpath with UTF-8 encoding" ) ; // $ NON - NLS - 1 $ // fallback to default xmlClasspath = new String ( bytes ) ; } return decodeClasspath ( projectName , Path . fromPortableString ( projectRootAbsoluteFullPath . getPath ( ) ) , xmlClasspath , unknownElements ) ;
public class IO { /** * Provide a skip fully method . Either skips the requested number of bytes * or throws an IOException ; * @ param in * The input stream on which to perform the skip * @ param bytes * Number of bytes to skip * @ throws EOFException * if we reach EOF and still need to skip more bytes * @ throws IOException * if in . skip throws an IOException */ public static void skipFully ( InputStream in , long bytes ) throws IOException { } }
if ( bytes < 0 ) { throw new IllegalArgumentException ( "Can't skip " + bytes + " bytes" ) ; } long remaining = bytes ; while ( remaining > 0 ) { long skipped = in . skip ( remaining ) ; if ( skipped <= 0 ) { throw new EOFException ( "Reached EOF while trying to skip a total of " + bytes ) ; } remaining -= skipped ; }
public class CommerceCurrencyPersistenceImpl { /** * Caches the commerce currencies in the entity cache if it is enabled . * @ param commerceCurrencies the commerce currencies */ @ Override public void cacheResult ( List < CommerceCurrency > commerceCurrencies ) { } }
for ( CommerceCurrency commerceCurrency : commerceCurrencies ) { if ( entityCache . getResult ( CommerceCurrencyModelImpl . ENTITY_CACHE_ENABLED , CommerceCurrencyImpl . class , commerceCurrency . getPrimaryKey ( ) ) == null ) { cacheResult ( commerceCurrency ) ; } else { commerceCurrency . resetOriginalValues ( ) ; } }
public class TransformStatistics {
    /**
     * Extracts the row, column, and matrix summations based on entries in
     * the given {@link Matrix} file. If {@code countRowOccurrances} is true, the
     * number of non-zeros in each row will be counted for the row summation.
     * If {@code countColumnOccurrances} is true, the same will be done for the
     * columns. In either case, the matrix summation will remain the same.
     *
     * @param inputMatrixFile a {@link Matrix} file to sum over
     * @param format the matrix {@link Format} of {@code inputMatrixFile}
     * @param countRowOccurrances true if the row summation should only count
     *        the number of non-zero values in a row
     * @param countColumnOccurrances true if the column summation should only
     *        count the number of non-zero values in a column
     * @return a {@link MatrixStatistics} instance containing the summations
     */
    public static MatrixStatistics extractStatistics(File inputMatrixFile,
                                                     Format format,
                                                     boolean countRowOccurrances,
                                                     boolean countColumnOccurrances) {
        // Initialize the statistics.
        int numColumns = 0;
        int numRows = 0;
        double matrixSum = 0;
        Map<Integer, Double> rowCountMap = new IntegerMap<Double>();
        Map<Integer, Double> colCountMap = new IntegerMap<Double>();
        // Get an iterator for the matrix file; wrap the checked I/O failure as unchecked.
        Iterator<MatrixEntry> iter;
        try {
            iter = MatrixIO.getMatrixFileIterator(inputMatrixFile, format);
        } catch (IOException ioe) {
            throw new IOError(ioe);
        }
        while (iter.hasNext()) {
            MatrixEntry entry = iter.next();
            // Track the matrix dimensions as the largest row/column index seen, plus one.
            if (entry.column() >= numColumns)
                numColumns = entry.column() + 1;
            if (entry.row() >= numRows)
                numRows = entry.row() + 1;
            // Skip zero-valued entries: they contribute nothing to the sums or counts.
            // (The original comment said "Skip non zero entries", which was inverted.)
            if (entry.value() == 0d)
                continue;
            // Gather the row sums (or non-zero counts when countRowOccurrances is set).
            Double occurance = rowCountMap.get(entry.row());
            double rowDelta = (countRowOccurrances) ? 1 : entry.value();
            rowCountMap.put(entry.row(), (occurance == null) ? rowDelta : occurance + rowDelta);
            // Gather the column sums (or non-zero counts when countColumnOccurrances is set).
            occurance = colCountMap.get(entry.column());
            double columnDelta = (countColumnOccurrances) ? 1 : entry.value();
            colCountMap.put(entry.column(), (occurance == null) ? columnDelta : occurance + columnDelta);
            matrixSum += entry.value();
        }
        // Convert the sparse maps to dense arrays sized by the observed dimensions.
        double[] rowSums = extractValues(rowCountMap, numRows);
        double[] columnSums = extractValues(colCountMap, numColumns);
        return new MatrixStatistics(rowSums, columnSums, matrixSum);
    }
}
public class MessageListenerFilterList {
    /**
     * Free.
     * Detaches this list's listener from every filter, frees any filter that is
     * either owned by this listener or left with no listeners, then clears the list.
     */
    public void free() {
        Iterator<BaseMessageFilter> iterator = this.iterator();
        while (iterator.hasNext()) {
            BaseMessageFilter messageFilter = iterator.next();
            boolean bFilterOwner = false;
            // This listener "owns" the filter when it is the filter's primary (index-0) listener.
            if (messageFilter.getMessageListener(0) == this)
                bFilterOwner = true;
            messageFilter.removeFilterMessageListener(m_messageListener);
            // Free the filter when no listeners remain, or when we owned it.
            if ((messageFilter.getMessageListener(0) == null) || (bFilterOwner))
                messageFilter.free();
            // Since I modified the iterator in this method: restart iteration from the head.
            // NOTE(review): termination relies on each pass removing the processed filter
            // from this list (via removeFilterMessageListener/free); confirm that contract.
            iterator = this.iterator();
        }
        this.clear();
        m_messageListener = null;
    }
}
public class StrSubstitutor { /** * Replaces all the occurrences of variables within the given source buffer * with their matching values from the resolver . * The buffer is updated with the result . * Only the specified portion of the buffer will be processed . * The rest of the buffer is not processed , but it is not deleted . * @ param source the buffer to replace in , updated , null returns zero * @ param offset the start offset within the array , must be valid * @ param length the length within the buffer to be processed , must be valid * @ return true if altered */ public boolean replaceIn ( final StringBuffer source , final int offset , final int length ) { } }
if ( source == null ) { return false ; } final StrBuilder buf = new StrBuilder ( length ) . append ( source , offset , length ) ; if ( substitute ( buf , 0 , length ) == false ) { return false ; } source . replace ( offset , offset + length , buf . toString ( ) ) ; return true ;
public class AbstractMetamodelDeclarationImpl {
    /**
     * Resets the given feature to its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // EMF-generated dispatch: reset the matching feature to its default,
        // otherwise delegate to the superclass.
        switch (featureID) {
            case XtextPackage.ABSTRACT_METAMODEL_DECLARATION__EPACKAGE:
                setEPackage((EPackage) null);
                return;
            case XtextPackage.ABSTRACT_METAMODEL_DECLARATION__ALIAS:
                setAlias(ALIAS_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class GdxUtilities { /** * Attempts to close the application on each platform . Calls { @ link Application # exit ( ) } on regular platforms and * manually calls { @ link ApplicationListener # dispose ( ) } on GWT , as it doesn ' t implement exit method properly . * Null - safe , this method will have an effect only if both { @ link Application } and { @ link ApplicationListener } are * created and assigned . */ public static void exit ( ) { } }
final Application application = Gdx . app ; if ( application == null ) { return ; } else if ( isRunningOnGwt ( ) ) { // GWT Application # exit ( ) implementation is empty . Disposing manually . if ( application . getApplicationListener ( ) != null ) { application . getApplicationListener ( ) . dispose ( ) ; // Application is now destroyed - silencing the ( expected ) rendering exceptions : application . setLogLevel ( Application . LOG_NONE ) ; } } else { application . exit ( ) ; }
public class CuckooFilter {
    /**
     * Creates a new {@code CuckooFilter} that's a copy of this instance. The
     * new instance is equal to this instance but shares no mutable state. Note
     * that further {@code #put(Object)} operations <i>may</i> cause a copy to
     * diverge even if the same operations are performed to both filters since
     * bucket swaps are essentially random.
     *
     * @return a copy of the filter
     */
    public CuckooFilter<T> copy() {
        // Hold the victim lock and every bucket read lock so the snapshot of
        // hasher/table/count/victim is internally consistent.
        // NOTE(review): victimLock.readLock() is called without retaining a return
        // value and released via tryUnlockRead() — this looks like StampedLock-style
        // usage; confirm the lock type's contract before restructuring.
        victimLock.readLock();
        bucketLocker.lockAllBucketsRead();
        try {
            return new CuckooFilter<>(hasher.copy(), table.copy(), count, hasVictim, victim.copy(), expectedConcurrency);
        } finally {
            // Release in reverse acquisition order.
            bucketLocker.unlockAllBucketsRead();
            victimLock.tryUnlockRead();
        }
    }
}
public class WorkUnitContext {
    /**
     * All the {@link Executor}s that jointly execute a {@link Task} call this method to synchronize on the start.
     *
     * @throws InterruptedException if the calling thread is interrupted while waiting on the latch
     */
    public void synchronizeStart() throws InterruptedException {
        // Block here until every participating executor arrives;
        // the main thread will send a notification.
        startLatch.synchronize();
        Executor e = Executor.currentExecutor();
        WorkUnit wu = e.getCurrentWorkUnit();
        // Only the executor running the main work unit records the started executable.
        if (wu.isMainWork()) {
            future.start.set(e.getCurrentExecutable());
        }
    }
}
public class RepairingNsStreamWriter {
    /**
     * Element copier method implementation suitable for use with
     * namespace-aware writers in repairing mode.
     * The trickiest thing is having to properly
     * order calls to <code>setPrefix</code>, <code>writeNamespace</code>
     * and <code>writeStartElement</code>; the order writers expect is
     * a bit different from the order in which element information is
     * passed in.
     */
    @Override
    public final void copyStartElement(InputElementStack elemStack, AttributeCollector ac)
            throws IOException, XMLStreamException {
        /* In case of repairing stream writer, we can actually just
         * go ahead and first output the element: stream writer should
         * be able to resolve namespace mapping for the element
         * automatically, as necessary.
         */
        String prefix = elemStack.getPrefix();
        String uri = elemStack.getNsURI();
        writeStartElement(prefix, elemStack.getLocalName(), uri);
        /* 04-Sep-2006, TSa: Although we could really just ignore all
         * namespace declarations, some apps prefer (or even expect...)
         * that ns bindings are preserved as much as possible. So, let's
         * just try to output them as they are (could optimize and skip
         * ones related to the start element [same prefix or URI], but
         * for now let's not bother).
         */
        int nsCount = elemStack.getCurrentNsCount();
        if (nsCount > 0) { // yup, got some...
            for (int i = 0; i < nsCount; ++i) {
                writeNamespace(elemStack.getLocalNsPrefix(i), elemStack.getLocalNsURI(i));
            }
        }
        /* And then let's just output attributes, if any (whether to copy
         * implicit, aka "default" attributes, is configurable).
         */
        int attrCount = mCfgCopyDefaultAttrs ? ac.getCount() : ac.getSpecifiedCount();
        /* Unlike in non-ns and simple-ns modes, we can not simply literally
         * copy the attributes here. It is possible that some namespace
         * prefixes have been remapped... so need to be a bit more careful.
         */
        if (attrCount > 0) {
            for (int i = 0; i < attrCount; ++i) {
                // First; need to make sure that the prefix-to-ns mapping the
                // attribute has is valid... and can not output anything
                // before that's done (since remapping will output a namespace
                // declaration!)
                uri = ac.getURI(i);
                prefix = ac.getPrefix(i);
                // With attributes, missing/empty prefix always means 'no
                // namespace', can take a shortcut:
                if (prefix == null || prefix.length() == 0) {
                    ;
                } else {
                    // and otherwise we'll always have a prefix as attributes
                    // can not make use of the def. namespace...
                    prefix = findOrCreateAttrPrefix(prefix, uri, mCurrElem);
                }
                /* Hmmh. Since the prefix we use may be different from what
                 * collector has, we can not use pass-through method of
                 * the collector, but need to call XmlWriter directly:
                 */
                if (prefix == null || prefix.length() == 0) {
                    mWriter.writeAttribute(ac.getLocalName(i), ac.getValue(i));
                } else {
                    mWriter.writeAttribute(prefix, ac.getLocalName(i), ac.getValue(i));
                }
            }
        }
    }
}
public class ProteinBuilderTool { /** * Creates a BioPolymer from a sequence of amino acid as identified by a * the sequence of their one letter codes . It uses the given { @ link IChemObjectBuilder } * to create a data model . * < p > For example : * < pre > * BioPolymer protein = ProteinBuilderTool . createProtein ( * " GAGA " , SilentChemObjectBuilder . getInstance ( ) * < / pre > * @ see # createProtein ( String ) */ public static IBioPolymer createProtein ( String sequence , IChemObjectBuilder builder ) throws CDKException { } }
Map < String , IAminoAcid > templates = AminoAcids . getHashMapBySingleCharCode ( ) ; IBioPolymer protein = builder . newInstance ( IBioPolymer . class ) ; IStrand strand = builder . newInstance ( IStrand . class ) ; IAminoAcid previousAA = null ; for ( int i = 0 ; i < sequence . length ( ) ; i ++ ) { String aminoAcidCode = "" + sequence . charAt ( i ) ; logger . debug ( "Adding AA: " + aminoAcidCode ) ; if ( aminoAcidCode . equals ( " " ) ) { // fine , just skip spaces } else { IAminoAcid aminoAcid = ( IAminoAcid ) templates . get ( aminoAcidCode ) ; if ( aminoAcid == null ) { throw new CDKException ( "Cannot build sequence! Unknown amino acid: " + aminoAcidCode ) ; } try { aminoAcid = ( IAminoAcid ) aminoAcid . clone ( ) ; } catch ( CloneNotSupportedException e ) { throw new CDKException ( "Cannot build sequence! Clone exception: " + e . getMessage ( ) , e ) ; } aminoAcid . setMonomerName ( aminoAcidCode + i ) ; logger . debug ( "protein: " , protein ) ; logger . debug ( "strand: " , strand ) ; addAminoAcidAtCTerminus ( protein , aminoAcid , strand , previousAA ) ; previousAA = aminoAcid ; } } // add the last oxygen of the protein IAtom oxygen = builder . newInstance ( IAtom . class , "O" ) ; // . . . to amino acid previousAA . addAtom ( oxygen ) ; IBond bond = builder . newInstance ( IBond . class , oxygen , previousAA . getCTerminus ( ) , IBond . Order . SINGLE ) ; previousAA . addBond ( bond ) ; // . . . and to protein protein . addAtom ( oxygen , previousAA , strand ) ; protein . addBond ( bond ) ; return protein ;
public class VectorUtil { /** * Compute the absolute cosine of the angle between two dense vectors . * To convert it to radians , use < code > Math . acos ( angle ) < / code > ! * @ param v1 first vector * @ param v2 second vector * @ return Angle */ public static double angleDense ( NumberVector v1 , NumberVector v2 ) { } }
final int dim1 = v1 . getDimensionality ( ) , dim2 = v2 . getDimensionality ( ) ; final int mindim = ( dim1 <= dim2 ) ? dim1 : dim2 ; // Essentially , we want to compute this : // v1 . transposeTimes ( v2 ) / ( v1 . euclideanLength ( ) * v2 . euclideanLength ( ) ) ; // We can just compute all three in parallel . double cross = 0 , l1 = 0 , l2 = 0 ; for ( int k = 0 ; k < mindim ; k ++ ) { final double r1 = v1 . doubleValue ( k ) ; final double r2 = v2 . doubleValue ( k ) ; cross += r1 * r2 ; l1 += r1 * r1 ; l2 += r2 * r2 ; } for ( int k = mindim ; k < dim1 ; k ++ ) { final double r1 = v1 . doubleValue ( k ) ; l1 += r1 * r1 ; } for ( int k = mindim ; k < dim2 ; k ++ ) { final double r2 = v2 . doubleValue ( k ) ; l2 += r2 * r2 ; } final double a = ( cross == 0. ) ? 0. : ( l1 == 0. || l2 == 0. ) ? 1. : FastMath . sqrt ( ( cross / l1 ) * ( cross / l2 ) ) ; return ( a < 1. ) ? a : 1. ;
public class FaultManager { /** * Check if a resource on a node is blacklisted . * @ param nodeName The node name . * @ param type The type of resource to check for blacklisting . * @ return A boolean value that is true if blacklisted , false if not . */ public boolean isBlacklisted ( String nodeName , ResourceType type ) { } }
List < ResourceType > blacklistedResourceTypes = blacklistedNodes . get ( nodeName ) ; if ( blacklistedResourceTypes != null ) { synchronized ( blacklistedResourceTypes ) { return blacklistedResourceTypes . contains ( type ) ; } } else { return false ; }
public class Distribution { /** * Computes the inverse Cumulative Density Function ( CDF < sup > - 1 < / sup > ) at * the given point . It takes in a value in the range of [ 0 , 1 ] and returns * the value x , such that CDF ( x ) = < tt > p < / tt > * @ param p the probability value * @ return the value such that the CDF would return < tt > p < / tt > */ public double invCdf ( double p ) { } }
if ( p < 0 || p > 1 ) throw new ArithmeticException ( "Value of p must be in the range [0,1], not " + p ) ; double a = Double . isInfinite ( min ( ) ) ? Double . MIN_VALUE : min ( ) ; double b = Double . isInfinite ( max ( ) ) ? Double . MAX_VALUE : max ( ) ; // default case , lets just do a root finding on the CDF for the specific value of p return Zeroin . root ( a , b , ( x ) -> cdf ( x ) - p ) ;
public class CommitVisitor { /** * Determines whether the specified visitable object is commit - able . The object is commit - able if the object * is Auditable and this Visitor is not targeting a specific object in the application domain object graph * hierarchy . * @ param visitable the visited object being evaluated for commit - ability . * @ return a boolean value indicating whether the targeted object can be committed . * @ see org . cp . elements . lang . Auditable */ protected boolean isCommitable ( Object visitable ) { } }
return ( visitable instanceof Auditable && ( target == null || identity ( visitable ) == identity ( target ) ) ) ;
public class Gauge { /** * Defines if only the first and the last ticklabel * will be drawn . Sometimes this could be useful if a gauge * should for example only should show 0 and 1000. * @ param VISIBLE */ public void setOnlyFirstAndLastTickLabelVisible ( final boolean VISIBLE ) { } }
if ( null == onlyFirstAndLastTickLabelVisible ) { _onlyFirstAndLastTickLabelVisible = VISIBLE ; fireUpdateEvent ( REDRAW_EVENT ) ; } else { onlyFirstAndLastTickLabelVisible . set ( VISIBLE ) ; }
public class CleverTapAPI {
    /**
     * Pushes the FCM device token to the backend, at most once per session
     * unless {@code forceUpdate} is set.
     *
     * @param token the FCM token; when null, falls back to the cached token
     * @param register whether to register (true) or unregister (false) the token
     * @param forceUpdate when true, push even if a token was already sent
     */
    private void pushFCMDeviceToken(String token, final boolean register, final boolean forceUpdate) {
        // Serialize all token pushes and reads/writes of havePushedDeviceToken.
        synchronized (tokenLock) {
            // Skip when a token was already sent, unless the caller forces a resend.
            if (havePushedDeviceToken && !forceUpdate) {
                getConfigLogger().verbose(getAccountId(), "FcmManager: skipping device token push - already sent.");
                return;
            }
            try {
                // Fall back to the cached token when none is supplied; nothing to do without one.
                token = (token != null) ? token : getCachedFCMToken();
                if (token == null) return;
                pushDeviceToken(context, token, register, PushType.FCM);
                havePushedDeviceToken = true;
            } catch (Throwable t) {
                // Best-effort by design: a failed token push must never crash the host app.
                getConfigLogger().verbose(getAccountId(), "FcmManager: pushing device token failed", t);
            }
        }
    }
}
public class OfflineDownloadOptions { /** * Used to build a new instance of this class . * @ return this classes builder class * @ since 0.1.0 */ public static Builder builder ( ) { } }
return new AutoValue_OfflineDownloadOptions . Builder ( ) . uuid ( UUID . randomUUID ( ) . getMostSignificantBits ( ) ) . metadata ( new byte [ ] { } ) . progress ( 0 ) ; // TODO user must provide a notificationOptions object
public class ProgressiveJpegParser {
    /**
     * Parses more data from inputStream, advancing the JPEG marker state machine
     * byte by byte.
     *
     * @param inputStream instance of buffered pooled byte buffer input stream
     * @return true if the stream is still a valid JPEG and a new best scan was
     *         found during this call; false otherwise
     */
    private boolean doParseMoreData(final InputStream inputStream) {
        // Remember the best scan seen so far; the return value reports whether it improved.
        final int oldBestScanNumber = mBestScanNumber;
        try {
            int nextByte;
            while (mParserState != NOT_A_JPEG && (nextByte = inputStream.read()) != -1) {
                mBytesParsed++;
                if (mEndMarkerRead) {
                    // There should be no more data after the EOI marker, just in case there is lets
                    // bail out instead of trying to parse the unknown data
                    mParserState = NOT_A_JPEG;
                    mEndMarkerRead = false;
                    return false;
                }
                switch (mParserState) {
                    case READ_FIRST_JPEG_BYTE:
                        // A JPEG must begin with 0xFF (marker first byte).
                        if (nextByte == JfifUtil.MARKER_FIRST_BYTE) {
                            mParserState = READ_SECOND_JPEG_BYTE;
                        } else {
                            mParserState = NOT_A_JPEG;
                        }
                        break;
                    case READ_SECOND_JPEG_BYTE:
                        // ...followed by the SOI (start-of-image) marker.
                        if (nextByte == JfifUtil.MARKER_SOI) {
                            mParserState = READ_MARKER_FIRST_BYTE_OR_ENTROPY_DATA;
                        } else {
                            mParserState = NOT_A_JPEG;
                        }
                        break;
                    case READ_MARKER_FIRST_BYTE_OR_ENTROPY_DATA:
                        // 0xFF may introduce a marker; any other byte is entropy data.
                        if (nextByte == JfifUtil.MARKER_FIRST_BYTE) {
                            mParserState = READ_MARKER_SECOND_BYTE;
                        }
                        break;
                    case READ_MARKER_SECOND_BYTE:
                        if (nextByte == JfifUtil.MARKER_FIRST_BYTE) {
                            // 0xFF 0xFF: stay here, the second 0xFF may still start a marker.
                            mParserState = READ_MARKER_SECOND_BYTE;
                        } else if (nextByte == JfifUtil.MARKER_ESCAPE_BYTE) {
                            // 0xFF 0x00 is an escaped data byte, not a marker.
                            mParserState = READ_MARKER_FIRST_BYTE_OR_ENTROPY_DATA;
                        } else if (nextByte == JfifUtil.MARKER_EOI) {
                            mEndMarkerRead = true;
                            // The EOI also terminates the current scan (marker is 2 bytes back).
                            newScanOrImageEndFound(mBytesParsed - 2);
                            // There should be no data after the EOI marker, but in case there is, let's process
                            // the next byte as a first marker byte.
                            mParserState = READ_MARKER_FIRST_BYTE_OR_ENTROPY_DATA;
                        } else {
                            if (nextByte == JfifUtil.MARKER_SOS) {
                                // A new scan starts; record where the previous one ended.
                                newScanOrImageEndFound(mBytesParsed - 2);
                            }
                            if (doesMarkerStartSegment(nextByte)) {
                                mParserState = READ_SIZE_FIRST_BYTE;
                            } else {
                                mParserState = READ_MARKER_FIRST_BYTE_OR_ENTROPY_DATA;
                            }
                        }
                        break;
                    case READ_SIZE_FIRST_BYTE:
                        // High byte of the segment length is consumed via mLastByteRead below.
                        mParserState = READ_SIZE_SECOND_BYTE;
                        break;
                    case READ_SIZE_SECOND_BYTE:
                        final int size = (mLastByteRead << 8) + nextByte;
                        // We need to jump after the end of the segment - skip size - 2 next bytes.
                        // We might want to skip more data than is available to read, in which case we will
                        // consume entire data in inputStream and exit this function before entering another
                        // iteration of the loop.
                        final int bytesToSkip = size - 2;
                        StreamUtil.skip(inputStream, bytesToSkip);
                        mBytesParsed += bytesToSkip;
                        mParserState = READ_MARKER_FIRST_BYTE_OR_ENTROPY_DATA;
                        break;
                    case NOT_A_JPEG:
                    default:
                        // Unreachable: NOT_A_JPEG terminates the loop condition.
                        Preconditions.checkState(false);
                }
                mLastByteRead = nextByte;
            }
        } catch (IOException ioe) {
            // does not happen, input stream returned by pooled byte buffer does not throw IOExceptions
            Throwables.propagate(ioe);
        }
        return mParserState != NOT_A_JPEG && mBestScanNumber != oldBestScanNumber;
    }
}
public class AWSStorageGatewayClient { /** * Deletes the specified virtual tape . This operation is only supported in the tape gateway type . * @ param deleteTapeRequest * DeleteTapeInput * @ return Result of the DeleteTape operation returned by the service . * @ throws InvalidGatewayRequestException * An exception occurred because an invalid gateway request was issued to the service . For more information , * see the error and message fields . * @ throws InternalServerErrorException * An internal server error has occurred during the request . For more information , see the error and message * fields . * @ sample AWSStorageGateway . DeleteTape * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / storagegateway - 2013-06-30 / DeleteTape " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DeleteTapeResult deleteTape ( DeleteTapeRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteTape ( request ) ;
public class ReflectionUtil { /** * Rerturns the { @ link Class # getDeclaredFields ( ) declared field } with the given name . * @ param type must not be < code > null < / code > . * @ param name must not be < code > null < / code > . * @ return the field , or < code > null < / code > */ public static Field findField ( Class < ? > type , String name ) { } }
if ( type == null ) { throw new IllegalArgumentException ( "Method argument type must not be null" ) ; } if ( name == null ) { throw new IllegalArgumentException ( "Method argument name must not be null" ) ; } Class < ? > c = type ; do { for ( Field f : c . getDeclaredFields ( ) ) { if ( name . equals ( f . getName ( ) ) ) { return f ; } } c = c . getSuperclass ( ) ; } while ( c != null ) ; return null ;
public class PorterStemmer { /** * cons ( i ) is true < = > b [ i ] is a consonant . */ private final boolean cons ( int i ) { } }
switch ( sb . charAt ( i ) ) { case 'a' : case 'e' : case 'i' : case 'o' : case 'u' : return false ; case 'y' : return ( i == 0 ) ? true : ! cons ( i - 1 ) ; default : return true ; }
public class InputChannel { /** * Increases the current backoff and returns whether the operation was successful . * @ return < code > true < / code > , iff the operation was successful . Otherwise , < code > false < / code > . */ protected boolean increaseBackoff ( ) { } }
// Backoff is disabled if ( currentBackoff < 0 ) { return false ; } // This is the first time backing off if ( currentBackoff == 0 ) { currentBackoff = initialBackoff ; return true ; } // Continue backing off else if ( currentBackoff < maxBackoff ) { currentBackoff = Math . min ( currentBackoff * 2 , maxBackoff ) ; return true ; } // Reached maximum backoff return false ;