signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class StringArrayList { /** * TODO */ public List < String > toList ( ) { } }
List < String > result ; result = new ArrayList < String > ( ) ; for ( int i = 0 ; i < size ; i ++ ) { result . add ( data [ i ] ) ; } return result ;
public class ProcessTweaks {
    /**
     * Removes all CALL nodes in the given TweakInfos, replacing calls to getter
     * functions with the tweak's default value. Non-getter tweak calls are
     * replaced with {@code void 0} so the surrounding expression stays valid.
     */
    private void stripAllCalls(Map<String, TweakInfo> tweakInfos) {
        for (TweakInfo tweakInfo : tweakInfos.values()) {
            boolean isRegistered = tweakInfo.isRegistered();
            for (TweakFunctionCall functionCall : tweakInfo.functionCalls) {
                Node callNode = functionCall.callNode;
                Node parent = callNode.getParent();
                if (functionCall.tweakFunc.isGetterFunction()) {
                    Node newValue;
                    if (isRegistered) {
                        // Registered tweak: substitute a copy of its declared default.
                        newValue = tweakInfo.getDefaultValueNode().cloneNode();
                    } else {
                        // When we find a getter of an unregistered tweak, there has
                        // already been a warning about it, so now just use a default
                        // value when stripping.
                        TweakFunction registerFunction = functionCall.tweakFunc.registerFunction;
                        newValue = registerFunction.createDefaultValueNode();
                    }
                    parent.replaceChild(callNode, newValue);
                    // Notify the compiler so downstream passes see the AST change.
                    compiler.reportChangeToEnclosingScope(parent);
                } else {
                    // Non-getter call: replace with "void 0", preserving source refs
                    // so error reporting still points at the original call site.
                    Node voidZeroNode = IR.voidNode(IR.number(0).srcref(callNode)).srcref(callNode);
                    parent.replaceChild(callNode, voidZeroNode);
                    compiler.reportChangeToEnclosingScope(parent);
                }
            }
        }
    }
}
public class UserInfoGridScreen { /** * Add all the screen listeners . */ public void addListeners ( ) { } }
super . addListeners ( ) ; Record recUserInfo = this . getMainRecord ( ) ; recUserInfo . setKeyArea ( UserInfo . USER_NAME_KEY ) ; if ( m_recHeader != null ) ( ( ReferenceField ) this . getScreenRecord ( ) . getField ( UserScreenRecord . USER_GROUP_ID ) ) . setReference ( m_recHeader ) ; recUserInfo . addListener ( new ExtractRangeFilter ( UserInfo . USER_NAME , this . getScreenRecord ( ) . getField ( UserScreenRecord . NAME_SORT ) ) ) ; recUserInfo . addListener ( new CompareFileFilter ( recUserInfo . getField ( UserInfo . USER_GROUP_ID ) , this . getScreenRecord ( ) . getField ( UserScreenRecord . USER_GROUP_ID ) , "=" , null , true ) ) ; this . getScreenRecord ( ) . getField ( UserScreenRecord . NAME_SORT ) . addListener ( new FieldReSelectHandler ( this ) ) ; this . getScreenRecord ( ) . getField ( UserScreenRecord . USER_GROUP_ID ) . addListener ( new FieldReSelectHandler ( this ) ) ; this . getMainRecord ( ) . addListener ( new SetupNewUserHandler ( null ) ) ;
public class UnderReplicatedBlocks { /** * / * remove a block from a under replication queue */ synchronized boolean remove ( BlockInfo blockInfo , int oldReplicas , int decommissionedReplicas , int oldExpectedReplicas ) { } }
int priLevel = getPriority ( blockInfo , oldReplicas , decommissionedReplicas , oldExpectedReplicas ) ; return remove ( blockInfo , priLevel ) ;
public class SerializerBase { /** * To fire off end document trace event */ protected void fireEndDoc ( ) throws org . xml . sax . SAXException { } }
if ( m_tracer != null ) { flushMyWriter ( ) ; m_tracer . fireGenerateEvent ( SerializerTrace . EVENTTYPE_ENDDOCUMENT ) ; }
public class GradientEditor { /** * Simple test case for the gradient painter * @ param argv The arguments supplied at the command line */ public static void main ( String [ ] argv ) { } }
JFrame frame = new JFrame ( ) ; JPanel panel = new JPanel ( ) ; panel . setBorder ( BorderFactory . createTitledBorder ( "Gradient" ) ) ; panel . setLayout ( null ) ; frame . setContentPane ( panel ) ; GradientEditor editor = new GradientEditor ( ) ; editor . setBounds ( 10 , 15 , 270 , 100 ) ; panel . add ( editor ) ; frame . setSize ( 300 , 200 ) ; frame . addWindowListener ( new WindowAdapter ( ) { public void windowClosing ( WindowEvent e ) { System . exit ( 0 ) ; } } ) ; frame . setVisible ( true ) ;
public class JsonHash { /** * see { @ link Map # put ( Object , Object ) } . * this method is alternative of { @ link # put ( String , Object , Type ) } call with { @ link Type # LONG } . * @ param key * @ param value * @ return see { @ link Map # put ( Object , Object ) } * @ since 1.4.12 * @ author vvakame */ public Object put ( String key , JsonArray value ) { } }
if ( value == null ) { stateMap . put ( key , Type . NULL ) ; } else { stateMap . put ( key , Type . ARRAY ) ; } return super . put ( key , value ) ;
public class Streams {
    /**
     * Returns an output stream writing to the given file.
     *
     * @param file a {@code File} object, possibly null.
     * @return a {@code FileOutputStream} for the given file, or {@code null}
     *         when {@code file} is null.
     * @throws FileNotFoundException if the file cannot be opened for writing
     */
    public static OutputStream toFile(File file) throws FileNotFoundException {
        return (file == null) ? null : new FileOutputStream(file);
    }
}
public class GenericEncodingStrategy { /** * Generates code to get a Lob locator value from RawSupport . RawSupport * instance and Lob instance must be on the stack . Result is a long locator * value on the stack . */ private void getLobLocator ( CodeAssembler a , StorablePropertyInfo info ) { } }
if ( ! info . isLob ( ) ) { throw new IllegalArgumentException ( ) ; } a . invokeInterface ( TypeDesc . forClass ( RawSupport . class ) , "getLocator" , TypeDesc . LONG , new TypeDesc [ ] { info . getStorageType ( ) } ) ;
public class SessionImpl {
    /**
     * {@inheritDoc}
     * Exports the whole workspace, starting at the root node, in system-view
     * (backup) XML format. XML and SAX failures are rethrown as IOException.
     */
    public void exportWorkspaceSystemView(OutputStream out, boolean skipBinary, boolean noRecurse)
        throws IOException, PathNotFoundException, RepositoryException {
        // Fail fast if this session has already been logged out.
        checkLive();
        LocationFactory factory = new LocationFactory(((NamespaceRegistryImpl) repository.getNamespaceRegistry()));
        // Workspace configuration is resolved from the session's container.
        WorkspaceEntry wsConfig = (WorkspaceEntry) container.getComponentInstanceOfType(WorkspaceEntry.class);
        ValueFactoryImpl valueFactoryImpl = new ValueFactoryImpl(factory, wsConfig, cleanerHolder);
        try {
            // BACKUP mapping produces the system-view serialization.
            BaseXmlExporter exporter =
                new ExportImportFactory().getExportVisitor(XmlMapping.BACKUP, out, skipBinary, noRecurse,
                    getTransientNodesManager(), repository.getNamespaceRegistry(), valueFactoryImpl);
            ItemData srcItemData = dataManager.getItemData(Constants.ROOT_UUID);
            if (srcItemData == null) {
                throw new PathNotFoundException("Root node not found");
            }
            exporter.export((NodeData) srcItemData);
        } catch (XMLStreamException e) {
            throw new IOException(e.getLocalizedMessage(), e);
        } catch (SAXException e) {
            throw new IOException(e.getLocalizedMessage(), e);
        }
    }
}
public class GMLGenerator { /** * ( non - Javadoc ) * @ see com . rometools . rome . io . ModuleGenerator # generate ( com . rometools . rome . feed . module . Module , * org . jdom2 . Element ) */ @ Override public void generate ( final Module module , final Element element ) { } }
// this is not necessary , it is done to avoid the namespace definition // in every item . Element root = element ; while ( root . getParent ( ) != null && root . getParent ( ) instanceof Element ) { root = ( Element ) element . getParent ( ) ; } root . addNamespaceDeclaration ( GeoRSSModule . SIMPLE_NS ) ; root . addNamespaceDeclaration ( GeoRSSModule . GML_NS ) ; final Element whereElement = new Element ( "where" , GeoRSSModule . SIMPLE_NS ) ; element . addContent ( whereElement ) ; final GeoRSSModule geoRSSModule = ( GeoRSSModule ) module ; final AbstractGeometry geometry = geoRSSModule . getGeometry ( ) ; if ( geometry instanceof Point ) { final Position pos = ( ( Point ) geometry ) . getPosition ( ) ; final Element pointElement = new Element ( "Point" , GeoRSSModule . GML_NS ) ; whereElement . addContent ( pointElement ) ; final Element posElement = new Element ( "pos" , GeoRSSModule . GML_NS ) ; posElement . addContent ( String . valueOf ( pos . getLatitude ( ) ) + " " + String . valueOf ( pos . getLongitude ( ) ) ) ; pointElement . addContent ( posElement ) ; } else if ( geometry instanceof LineString ) { final PositionList posList = ( ( LineString ) geometry ) . getPositionList ( ) ; final Element lineElement = new Element ( "LineString" , GeoRSSModule . GML_NS ) ; lineElement . addContent ( createPosListElement ( posList ) ) ; whereElement . addContent ( lineElement ) ; } else if ( geometry instanceof Polygon ) { final Element polygonElement = new Element ( "Polygon" , GeoRSSModule . GML_NS ) ; { final AbstractRing ring = ( ( Polygon ) geometry ) . getExterior ( ) ; if ( ring instanceof LinearRing ) { final Element exteriorElement = new Element ( "exterior" , GeoRSSModule . GML_NS ) ; polygonElement . addContent ( exteriorElement ) ; final Element ringElement = new Element ( "LinearRing" , GeoRSSModule . GML_NS ) ; exteriorElement . addContent ( ringElement ) ; ringElement . addContent ( createPosListElement ( ( ( LinearRing ) ring ) . 
getPositionList ( ) ) ) ; } else { System . err . println ( "GeoRSS GML format can't handle rings of type: " + ring . getClass ( ) . getName ( ) ) ; } } final List < AbstractRing > interiorList = ( ( Polygon ) geometry ) . getInterior ( ) ; final Iterator < AbstractRing > it = interiorList . iterator ( ) ; while ( it . hasNext ( ) ) { final AbstractRing ring = it . next ( ) ; if ( ring instanceof LinearRing ) { final Element interiorElement = new Element ( "interior" , GeoRSSModule . GML_NS ) ; polygonElement . addContent ( interiorElement ) ; final Element ringElement = new Element ( "LinearRing" , GeoRSSModule . GML_NS ) ; interiorElement . addContent ( ringElement ) ; ringElement . addContent ( createPosListElement ( ( ( LinearRing ) ring ) . getPositionList ( ) ) ) ; } else { System . err . println ( "GeoRSS GML format can't handle rings of type: " + ring . getClass ( ) . getName ( ) ) ; } } whereElement . addContent ( polygonElement ) ; } else if ( geometry instanceof Envelope ) { final Envelope envelope = ( Envelope ) geometry ; final Element envelopeElement = new Element ( "Envelope" , GeoRSSModule . GML_NS ) ; whereElement . addContent ( envelopeElement ) ; final Element lowerElement = new Element ( "lowerCorner" , GeoRSSModule . GML_NS ) ; lowerElement . addContent ( String . valueOf ( envelope . getMinLatitude ( ) ) + " " + String . valueOf ( envelope . getMinLongitude ( ) ) ) ; envelopeElement . addContent ( lowerElement ) ; final Element upperElement = new Element ( "upperCorner" , GeoRSSModule . GML_NS ) ; upperElement . addContent ( String . valueOf ( envelope . getMaxLatitude ( ) ) + " " + String . valueOf ( envelope . getMaxLongitude ( ) ) ) ; envelopeElement . addContent ( upperElement ) ; } else { System . err . println ( "GeoRSS GML format can't handle geometries of type: " + geometry . getClass ( ) . getName ( ) ) ; }
public class IOUtil {
    /**
     * Best-effort forced deletion of a file: no-op when the file is null or
     * does not exist, and any failure is suppressed so callers are never
     * interrupted by cleanup errors.
     */
    public void forceDelete(File tempFile) {
        try {
            if (tempFile != null && tempFile.exists()) {
                FileUtils.forceDelete(tempFile);
            }
        } catch (Throwable t) {
            // Deliberately swallowed: deletion is best-effort cleanup.
            // NOTE(review): printStackTrace bypasses any logging framework —
            // consider routing through the project logger; confirm callers
            // rely on the no-throw contract before changing it.
            t.printStackTrace();
        }
    }
}
public class CmsDriverManager {
    /**
     * Publishes the resources of a specified publish list.<p>
     *
     * @param cms the current request context
     * @param dbc the current database context
     * @param publishList a publish list
     * @param report an instance of <code>{@link I_CmsReport}</code> to print messages
     * @throws CmsException if something goes wrong
     * @see #fillPublishList(CmsDbContext, CmsPublishList)
     */
    public synchronized void publishProject(CmsObject cms, CmsDbContext dbc, CmsPublishList publishList, I_CmsReport report)
        throws CmsException {
        // check the parent folders
        checkParentFolders(dbc, publishList);
        ensureSubResourcesOfMovedFoldersPublished(cms, dbc, publishList);
        OpenCms.getPublishManager().getPublishListVerifier().checkPublishList(publishList);
        try {
            // fire an event that a project is to be published
            Map<String, Object> eventData = new HashMap<String, Object>();
            eventData.put(I_CmsEventListener.KEY_REPORT, report);
            eventData.put(I_CmsEventListener.KEY_PUBLISHLIST, publishList);
            eventData.put(I_CmsEventListener.KEY_PROJECTID, dbc.currentProject().getUuid());
            eventData.put(I_CmsEventListener.KEY_DBCONTEXT, dbc);
            CmsEvent beforePublishEvent = new CmsEvent(I_CmsEventListener.EVENT_BEFORE_PUBLISH_PROJECT, eventData);
            OpenCms.fireCmsEvent(beforePublishEvent);
        } catch (Throwable t) {
            // a failing event listener must not abort the publish itself
            if (report != null) {
                report.addError(t);
                report.println(t);
            }
            if (LOG.isErrorEnabled()) {
                LOG.error(t.getLocalizedMessage(), t);
            }
        }
        // lock all resources with the special publish lock
        // (iterate over a copy, since entries may be removed from the list)
        Iterator<CmsResource> itResources = new ArrayList<CmsResource>(publishList.getAllResources()).iterator();
        while (itResources.hasNext()) {
            CmsResource resource = itResources.next();
            CmsLock lock = m_lockManager.getLock(dbc, resource, false);
            if (lock.getSystemLock().isUnlocked() && lock.isLockableBy(dbc.currentUser())) {
                // no system lock yet: take (or upgrade to) the publish lock
                if (getLock(dbc, resource).getEditionLock().isNullLock()) {
                    lockResource(dbc, resource, CmsLockType.PUBLISH);
                } else {
                    changeLock(dbc, resource, CmsLockType.PUBLISH);
                }
            } else if (lock.getSystemLock().isPublish()) {
                if (LOG.isWarnEnabled()) {
                    LOG.warn(Messages.get().getBundle().key(
                        Messages.RPT_PUBLISH_REMOVED_RESOURCE_1,
                        dbc.removeSiteRoot(resource.getRootPath())));
                }
                // remove files that are already waiting to be published
                publishList.remove(resource);
                continue;
            } else {
                // this is needed to fix TestPublishIsssues#testPublishScenarioE
                changeLock(dbc, resource, CmsLockType.PUBLISH);
            }
            // now re-check the lock state
            lock = m_lockManager.getLock(dbc, resource, false);
            if (!lock.getSystemLock().isPublish()) {
                if (report != null) {
                    report.println(Messages.get().container(
                        Messages.RPT_PUBLISH_REMOVED_RESOURCE_1,
                        dbc.removeSiteRoot(resource.getRootPath())), I_CmsReport.FORMAT_WARNING);
                }
                if (LOG.isWarnEnabled()) {
                    LOG.warn(Messages.get().getBundle().key(
                        Messages.RPT_PUBLISH_REMOVED_RESOURCE_1,
                        dbc.removeSiteRoot(resource.getRootPath())));
                }
                // remove files that could not be locked
                publishList.remove(resource);
            }
        }
        // enqueue the publish job
        CmsException enqueueException = null;
        try {
            m_publishEngine.enqueuePublishJob(cms, publishList, report);
        } catch (CmsException exc) {
            enqueueException = exc;
        }
        // if an exception was raised, remove the publish locks
        // and throw the exception again
        if (enqueueException != null) {
            itResources = publishList.getAllResources().iterator();
            while (itResources.hasNext()) {
                CmsResource resource = itResources.next();
                CmsLock lock = m_lockManager.getLock(dbc, resource, false);
                if (lock.getSystemLock().isPublish()
                    && lock.getSystemLock().isOwnedInProjectBy(
                        cms.getRequestContext().getCurrentUser(),
                        cms.getRequestContext().getCurrentProject())) {
                    unlockResource(dbc, resource, true, true);
                }
            }
            throw enqueueException;
        }
    }
}
public class Latkes { /** * Gets the runtime cache . * @ return runtime cache */ public static RuntimeCache getRuntimeCache ( ) { } }
final String runtimeCache = getLocalProperty ( "runtimeCache" ) ; if ( null == runtimeCache ) { LOGGER . debug ( "Not found [runtimeCache] in local.properties, uses [LOCAL_LRU] as default" ) ; return RuntimeCache . LOCAL_LRU ; } return RuntimeCache . valueOf ( runtimeCache ) ;
public class Costs {
    /**
     * Subtracts the given costs from these costs. If the given costs are unknown, then these costs remain unchanged.
     *
     * @param other The costs to subtract.
     * @throws IllegalArgumentException if any component would become negative
     */
    public void subtractCosts(Costs other) {
        // Quantifiable costs: only subtract when BOTH sides are known;
        // an UNKNOWN on either side leaves that component untouched.
        if (this.networkCost != UNKNOWN && other.networkCost != UNKNOWN) {
            this.networkCost -= other.networkCost;
            if (this.networkCost < 0) {
                throw new IllegalArgumentException("Cannot subtract more cost then there is.");
            }
        }
        if (this.diskCost != UNKNOWN && other.diskCost != UNKNOWN) {
            this.diskCost -= other.diskCost;
            if (this.diskCost < 0) {
                throw new IllegalArgumentException("Cannot subtract more cost then there is.");
            }
        }
        if (this.cpuCost != UNKNOWN && other.cpuCost != UNKNOWN) {
            this.cpuCost -= other.cpuCost;
            if (this.cpuCost < 0) {
                throw new IllegalArgumentException("Cannot subtract more cost then there is.");
            }
        }
        // ----- relative costs -----
        // Heuristic costs are always known, so subtract unconditionally.
        this.heuristicNetworkCost -= other.heuristicNetworkCost;
        if (this.heuristicNetworkCost < 0) {
            throw new IllegalArgumentException("Cannot subtract more cost then there is.");
        }
        this.heuristicDiskCost -= other.heuristicDiskCost;
        if (this.heuristicDiskCost < 0) {
            throw new IllegalArgumentException("Cannot subtract more cost then there is.");
        }
        this.heuristicCpuCost -= other.heuristicCpuCost;
        if (this.heuristicCpuCost < 0) {
            throw new IllegalArgumentException("Cannot subtract more cost then there is.");
        }
    }
}
public class BaseConvertToNative { /** * Move this standard payload properties from the message to the xml . * @ param message * @ param msg * @ param strKey */ public void setPayloadProperty ( BaseMessage message , Object msg , String strKey , Class < ? > classKey ) { } }
Object data = message . get ( strKey ) ; if ( data == null ) return ; this . setPayloadProperty ( data , msg , strKey , classKey ) ;
public class DynamicResourcePool { /** * Closes the pool and clears all the resources . The resource pool should not be used after this . */ @ Override public void close ( ) throws IOException { } }
try { mLock . lock ( ) ; if ( mAvailableResources . size ( ) != mResources . size ( ) ) { LOG . warn ( "{} resources are not released when closing the resource pool." , mResources . size ( ) - mAvailableResources . size ( ) ) ; } for ( ResourceInternal < T > resourceInternal : mAvailableResources ) { closeResource ( resourceInternal . mResource ) ; } mAvailableResources . clear ( ) ; } finally { mLock . unlock ( ) ; } mGcFuture . cancel ( true ) ;
public class BinaryString { /** * Concatenates input strings together into a single string . */ public static BinaryString concat ( Iterable < BinaryString > inputs ) { } }
// Compute the total length of the result . int totalLength = 0 ; for ( BinaryString input : inputs ) { if ( input != null ) { input . ensureMaterialized ( ) ; totalLength += input . getSizeInBytes ( ) ; } } // Allocate a new byte array , and copy the inputs one by one into it . final byte [ ] result = new byte [ totalLength ] ; int offset = 0 ; for ( BinaryString input : inputs ) { if ( input != null ) { int len = input . sizeInBytes ; SegmentsUtil . copyToBytes ( input . segments , input . offset , result , offset , len ) ; offset += len ; } } return fromBytes ( result ) ;
public class AccountsInner {
    /**
     * Gets the specified Azure Storage account linked to the given Data Lake Analytics account.
     *
     * @param resourceGroupName The name of the Azure resource group that contains the Data Lake Analytics account.
     * @param accountName The name of the Data Lake Analytics account from which to retrieve Azure storage account details.
     * @param storageAccountName The name of the Azure Storage account for which to retrieve the details.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the StorageAccountInfoInner object
     */
    public Observable<ServiceResponse<StorageAccountInfoInner>> getStorageAccountWithServiceResponseAsync(
        String resourceGroupName, String accountName, String storageAccountName) {
        // Validate all required parameters (including client-level ones) up front.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (storageAccountName == null) {
            throw new IllegalArgumentException("Parameter storageAccountName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the REST call, then map the raw HTTP response into a typed
        // ServiceResponse; delegate errors into the Observable stream.
        return service.getStorageAccount(resourceGroupName, accountName, storageAccountName,
            this.client.subscriptionId(), this.client.apiVersion(),
            this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<StorageAccountInfoInner>>>() {
                @Override
                public Observable<ServiceResponse<StorageAccountInfoInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<StorageAccountInfoInner> clientResponse = getStorageAccountDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class DefaultMonetaryRoundingsSingletonSpi { /** * Allows to access the identifiers of the current defined roundings . * @ param providers the providers and ordering to be used . By default providers and ordering as defined in * # getDefaultProviders is used , not null . * @ return the set of custom rounding ids , never { @ code null } . */ @ Override public Set < String > getRoundingNames ( String ... providers ) { } }
Set < String > result = new HashSet < > ( ) ; String [ ] providerNames = providers ; if ( providerNames . length == 0 ) { providerNames = Monetary . getDefaultRoundingProviderChain ( ) . toArray ( new String [ Monetary . getDefaultRoundingProviderChain ( ) . size ( ) ] ) ; } for ( String providerName : providerNames ) { for ( RoundingProviderSpi prov : Bootstrap . getServices ( RoundingProviderSpi . class ) ) { try { if ( prov . getProviderName ( ) . equals ( providerName ) || prov . getProviderName ( ) . matches ( providerName ) ) { result . addAll ( prov . getRoundingNames ( ) ) ; } } catch ( Exception e ) { Logger . getLogger ( DefaultMonetaryRoundingsSingletonSpi . class . getName ( ) ) . log ( Level . SEVERE , "Error loading RoundingProviderSpi from provider: " + prov , e ) ; } } } return result ;
public class FSNamesystemDatanodeHelper { /** * Get status of the datanodes in the system . */ public static DatanodeStatus getDatanodeStats ( FSNamesystem ns , ArrayList < DatanodeDescriptor > live , ArrayList < DatanodeDescriptor > dead ) { } }
ns . DFSNodesStatus ( live , dead ) ; ArrayList < DatanodeDescriptor > decommissioning = ns . getDecommissioningNodesList ( live ) ; // live nodes int numLive = live . size ( ) ; int numLiveExcluded = 0 ; int numLiveDecommissioningInProgress = decommissioning . size ( ) ; int numLiveDecommissioned = 0 ; for ( DatanodeDescriptor d : live ) { numLiveDecommissioned += d . isDecommissioned ( ) ? 1 : 0 ; numLiveExcluded += ns . inExcludedHostsList ( d , null ) ? 1 : 0 ; } // dead nodes int numDead = dead . size ( ) ; int numDeadExcluded = 0 ; int numDeadDecommissioningNotCompleted = 0 ; int numDeadDecommissioned = 0 ; for ( DatanodeDescriptor d : dead ) { numDeadDecommissioned += d . isDecommissioned ( ) ? 1 : 0 ; numDeadExcluded += ns . inExcludedHostsList ( d , null ) ? 1 : 0 ; } numDeadDecommissioningNotCompleted = numDeadExcluded - numDeadDecommissioned ; return new DatanodeStatus ( numLive , numLiveExcluded , numLiveDecommissioningInProgress , numLiveDecommissioned , numDead , numDeadExcluded , numDeadDecommissioningNotCompleted , numDeadDecommissioned ) ;
public class Matrix4x3f { /** * / * ( non - Javadoc ) * @ see org . joml . Matrix4x3fc # invertOrtho ( org . joml . Matrix4x3f ) */ public Matrix4x3f invertOrtho ( Matrix4x3f dest ) { } }
float invM00 = 1.0f / m00 ; float invM11 = 1.0f / m11 ; float invM22 = 1.0f / m22 ; dest . set ( invM00 , 0 , 0 , 0 , invM11 , 0 , 0 , 0 , invM22 , - m30 * invM00 , - m31 * invM11 , - m32 * invM22 ) ; dest . properties = 0 ; return dest ;
public class CCApi2 {
    /**
     * Gets the bulk activities service, creating the Retrofit proxy lazily on
     * first use (double-checked locking on the class monitor).
     *
     * @return the bulk activities service
     */
    public BulkActivitiesService getBulkActivitiesService() {
        // NOTE(review): double-checked locking is only safe if
        // _bulkActivitiesService is declared volatile — the field declaration
        // is outside this view; confirm.
        if (_bulkActivitiesService == null) {
            synchronized (CCApi2.class) {
                if (_bulkActivitiesService == null) {
                    _bulkActivitiesService = _retrofit.create(BulkActivitiesService.class);
                }
            }
        }
        return _bulkActivitiesService;
    }
}
public class PPOImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the containment list of RG children, creating it lazily on
     * first access (standard EMF generated accessor — do not hand-edit).
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<PPORG> getRG() {
        if (rg == null) {
            // Lazily initialize the resolving containment list for this feature.
            rg = new EObjectContainmentEList.Resolving<PPORG>(PPORG.class, this, AfplibPackage.PPO__RG);
        }
        return rg;
    }
}
public class MDBRuntimeImpl {
    /**
     * dynamic/optional/multiple. May be called at any time and in any order.
     * Registers (or replaces) an EndpointActivationService and re-activates
     * the message endpoints bound to its activation-spec id.
     *
     * @param reference reference to EndpointActivationService service
     */
    @Reference(name = REFERENCE_ENDPOINT_ACTIVATION_SERVICES,
               service = EndpointActivationService.class,
               policy = ReferencePolicy.DYNAMIC,
               cardinality = ReferenceCardinality.MULTIPLE)
    protected synchronized void addEndPointActivationService(ServiceReference<EndpointActivationService> reference) {
        // The activation-spec config id keys the bookkeeping entry.
        String activationSvcId = (String) reference.getProperty(ACT_SPEC_CFG_ID);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "activationSvcId : " + activationSvcId);
        }
        EndpointActivationServiceInfo easInfo = createEndpointActivationServiceInfo(activationSvcId);
        // Deactivate any endpoints that were using the old service.
        if (easInfo.service != null) {
            deactivateEndpoints(easInfo.endpointFactories);
        }
        // Activate any endpoints with the new service.
        easInfo.setReference(reference);
        activateDeferredEndpoints(easInfo.endpointFactories);
    }
}
public class BinaryMap { /** * { @ inheritDoc } * @ param fromKey * @ param inclusive * @ return */ @ Override public NavigableMap < K , V > tailMap ( K fromKey , boolean inclusive ) { } }
Entry < K , V > from = entry ( fromKey , null ) ; return new BinaryMap < > ( entrySet . tailSet ( from , inclusive ) , comparator ) ;
public class PropertyListSerialization {
    /**
     * Serialize a Boolean as an empty {@code <true/>} or {@code <false/>}
     * element.
     *
     * @param val boolean to serialize.
     * @param handler destination of serialization events.
     * @throws SAXException if exception during serialization.
     */
    private static void serializeBoolean(final Boolean val, final ContentHandler handler) throws SAXException {
        final String tag = val.booleanValue() ? "true" : "false";
        handler.startElement(null, tag, tag, new AttributesImpl());
        handler.endElement(null, tag, tag);
    }
}
public class ToolProvider {
    /**
     * Determine if this is the desired tool instance by comparing the name of
     * the module that declares it against the requested module name.
     *
     * @param <T> the interface of the tool
     * @param tool the instance of the tool
     * @param moduleName the name of the module containing the desired implementation
     * @return true if and only if the tool matches the specified criteria
     */
    private static <T> boolean matches(T tool, String moduleName) {
        // Reflection stands in for: moduleName.equals(tool.getClass().getModule().getName())
        // (the Module API cannot be referenced directly at this compilation level).
        PrivilegedAction<Boolean> check = () -> {
            try {
                Method getModule = Class.class.getDeclaredMethod("getModule");
                Object module = getModule.invoke(tool.getClass());
                Method getName = module.getClass().getDeclaredMethod("getName");
                return moduleName.equals((String) getName.invoke(module));
            } catch (InvocationTargetException | NoSuchMethodException | IllegalAccessException e) {
                // Reflection unavailable (or failed): treat as non-matching.
                return false;
            }
        };
        return AccessController.doPrivileged(check);
    }
}
public class AmazonCloudWatchClient { /** * Temporarily sets the state of an alarm for testing purposes . When the updated state differs from the previous * value , the action configured for the appropriate state is invoked . For example , if your alarm is configured to * send an Amazon SNS message when an alarm is triggered , temporarily changing the alarm state to < code > ALARM < / code > * sends an SNS message . The alarm returns to its actual state ( often within seconds ) . Because the alarm state * change happens quickly , it is typically only visible in the alarm ' s < b > History < / b > tab in the Amazon CloudWatch * console or through < a > DescribeAlarmHistory < / a > . * @ param setAlarmStateRequest * @ return Result of the SetAlarmState operation returned by the service . * @ throws ResourceNotFoundException * The named resource does not exist . * @ throws InvalidFormatException * Data was not syntactically valid JSON . * @ sample AmazonCloudWatch . SetAlarmState * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / monitoring - 2010-08-01 / SetAlarmState " target = " _ top " > AWS API * Documentation < / a > */ @ Override public SetAlarmStateResult setAlarmState ( SetAlarmStateRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeSetAlarmState ( request ) ;
public class Preconditions {
    /**
     * Precondition that clients are required to fulfill.
     * Violations are considered to be programming errors, on the client's part.
     *
     * @param <T> type of object to check
     * @param reference the reference to check
     * @param predicate the predicate that the given reference must satisfy
     * @return the validated object
     * @throws RequireViolation if the <tt>predicate</tt> is <b>false</b>
     */
    public static <T> T require(T reference, Predicate<T> predicate) {
        // Delegate to the message-formatting overload with a generic message
        // that interpolates the offending reference.
        return require(reference, predicate, "Expected to fulfill the requirement, got '%s'", reference);
    }
}
public class SphinxLinks { /** * Our use case is * { @ code $ python - c " from docutils import nodes ; print ( ' term - ' + nodes . make _ id ( ' QR ' ) ) " } , which * returns { @ code term - qr } i . e . , identifiers conforming to the regular expression * [ a - z ] ( - ? [ a - z0-9 ] + ) * * But there is a requirement to use < em > pure < / em > java for this task . So we clone the function * here . python docutils is public domain . * @ see http : / / code . nabla . net / doc / docutils / api / docutils / nodes / docutils . nodes . make _ id . html */ public String make_id ( String txt ) { } }
// id = string . lower ( ) String id = txt . toLowerCase ( ) ; // if not isinstance ( id , unicode ) : // id = id . decode ( ) // id = id . translate ( _ non _ id _ translate _ digraphs ) id = translate ( id , nonIdTranslateDigraphs ) ; // id = id . translate ( _ non _ id _ translate ) id = translate ( id , nonIdTranslate ) ; // # get rid of non - ascii characters . // # ' ascii ' lowercase to prevent problems with turkish locale . // id = unicodedata . normalize ( ' NFKD ' , id ) . \ // encode ( ' ascii ' , ' ignore ' ) . decode ( ' ascii ' ) // # shrink runs of whitespace and replace by hyphen // id = _ non _ id _ chars . sub ( ' - ' , ' ' . join ( id . split ( ) ) ) id = id . replaceAll ( "\\s+" , " " ) . replaceAll ( nonIdChars , "-" ) ; // id = _ non _ id _ at _ ends . sub ( ' ' , id ) id = id . replaceAll ( nonIdAtEnds , "" ) ; // return str ( id ) return id ;
public class AbstractApplication { /** * Return the application class name without the Application suffix . * @ return the application class short name */ private String computeShortClassName ( ) { } }
String name = this . getClass ( ) . getSimpleName ( ) ; if ( name . endsWith ( APP_SUFFIX_CLASSNAME ) ) { name = name . substring ( 0 , name . indexOf ( APP_SUFFIX_CLASSNAME ) ) ; } return name ;
public class WordSegmenting {
    /**
     * Display copyright and tool information on standard output.
     */
    public static void displayCopyright() {
        // Banner lines printed in order, followed by a trailing blank line.
        final String[] banner = {
            "Vietnamese Word Segmentation:",
            "\tusing Conditional Random Fields",
            "\ttesting our dataset of 8000 sentences with the highest F1-measure of 94%",
            "Copyright (C) by Cam-Tu Nguyen {1,2} and Xuan-Hieu Phan {2}",
            "{1}: College of Technology, Hanoi National University",
            "{2}: Graduate School of Information Sciences, Tohoku University",
            "Email: {ncamtu@gmail.com ; pxhieu@gmail.com}"
        };
        for (String line : banner) {
            System.out.println(line);
        }
        System.out.println();
    }
}
public class Ast { /** * get the actual data as Java objects . * @ param dictConverter object to convert dicts to actual instances for a class , * instead of leaving them as dictionaries . Requires the _ _ class _ _ key to be present * in the dict node . If it returns null , the normal processing is done . */ public Object getData ( IDictToInstance dictConverter ) { } }
ObjectifyVisitor v = new ObjectifyVisitor ( dictConverter ) ; this . accept ( v ) ; return v . getObject ( ) ;
public class Quotes {
    /**
     * Convert strings with both quotes and ticks into a valid xpath component.
     * For example,
     * {@code foo} will be converted to {@code "foo"},
     * {@code f"oo} will be converted to {@code 'f"oo'},
     * {@code foo'"bar} will be converted to {@code concat("foo'", '"', "bar")}.
     *
     * @param toEscape a text to escape quotes in, e.g. {@code "f'oo"}
     * @return the same text with escaped quotes, e.g. {@code "\"f'oo\""}
     */
    @SuppressWarnings("JavaDoc")
    public static String escape(String toEscape) {
        final boolean hasQuote = toEscape.contains("\"");
        final boolean hasTick = toEscape.contains("'");
        if (hasQuote && hasTick) {
            // Both kinds present: neither quoting style works alone, build concat(...).
            return escapeQuoteAndTick(toEscape);
        }
        if (hasQuote) {
            // Only double quotes present: wrap in single quotes, f"oo -> 'f"oo'.
            return String.format("'%s'", toEscape);
        }
        // No double quotes at all: wrap in double quotes.
        return String.format("\"%s\"", toEscape);
    }

    /**
     * Build an xpath {@code concat(...)} expression for a string containing both
     * double quotes and ticks: each double-quoted segment is emitted as-is and the
     * literal double quotes between segments are contributed as {@code '"'}.
     */
    private static String escapeQuoteAndTick(String toEscape) {
        final boolean endsWithQuote = toEscape.lastIndexOf("\"") == toEscape.length() - 1;
        final String[] segments = toEscape.split("\"");
        final StringBuilder result = new StringBuilder("concat(");
        for (int i = 0; i < segments.length; i++) {
            result.append("\"").append(segments[i]).append("\"");
            if (i < segments.length - 1) {
                result.append(", '\"', ");
            } else {
                // split() drops a trailing empty segment, so a final double quote
                // must be appended explicitly before closing the concat().
                result.append(endsWithQuote ? ", '\"')" : ")");
            }
        }
        return result.toString();
    }
}
public class Assistant {
    /**
     * Create entity value.
     * Create a new value for an entity.
     * This operation is limited to 1000 requests per 30 minutes. For more information, see **Rate limiting**.
     *
     * @param createValueOptions the {@link CreateValueOptions} containing the options for the call
     * @return a {@link ServiceCall} with a response type of {@link Value}
     */
    public ServiceCall<Value> createValue(CreateValueOptions createValueOptions) {
        Validator.notNull(createValueOptions, "createValueOptions cannot be null");
        // Build the request URL: v1/workspaces/{workspaceId}/entities/{entity}/values
        String[] pathSegments = { "v1/workspaces", "entities", "values" };
        String[] pathParameters = { createValueOptions.workspaceId(), createValueOptions.entity() };
        RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments, pathParameters));
        builder.query("version", versionDate);
        // Attach SDK analytics headers plus the JSON Accept header.
        Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("conversation", "v1", "createValue");
        for (Entry<String, String> header : sdkHeaders.entrySet()) {
            builder.header(header.getKey(), header.getValue());
        }
        builder.header("Accept", "application/json");
        // Assemble the JSON body; optional fields are included only when set.
        final JsonObject contentJson = new JsonObject();
        contentJson.addProperty("value", createValueOptions.value());
        if (createValueOptions.metadata() != null) {
            contentJson.add("metadata", GsonSingleton.getGson().toJsonTree(createValueOptions.metadata()));
        }
        if (createValueOptions.valueType() != null) {
            // API field name is "type" even though the option is called valueType.
            contentJson.addProperty("type", createValueOptions.valueType());
        }
        if (createValueOptions.synonyms() != null) {
            contentJson.add("synonyms", GsonSingleton.getGson().toJsonTree(createValueOptions.synonyms()));
        }
        if (createValueOptions.patterns() != null) {
            contentJson.add("patterns", GsonSingleton.getGson().toJsonTree(createValueOptions.patterns()));
        }
        builder.bodyJson(contentJson);
        return createServiceCall(builder.build(), ResponseConverterUtils.getObject(Value.class));
    }
}
public class AbstractSegment3F { /** * Replies if two lines are intersecting . * @ param x1 is the first point of the first line . * @ param y1 is the first point of the first line . * @ param z1 is the first point of the first line . * @ param x2 is the second point of the first line . * @ param y2 is the second point of the first line . * @ param z2 is the second point of the first line . * @ param x3 is the first point of the second line . * @ param y3 is the first point of the second line . * @ param z3 is the first point of the second line . * @ param x4 is the second point of the second line . * @ param y4 is the second point of the second line . * @ param z4 is the second point of the second line . * @ return < code > true < / code > if the two shapes are intersecting ; otherwise * < code > false < / code > * @ see " http : / / mathworld . wolfram . com / Line - LineIntersection . html " */ @ Pure public static boolean intersectsLineLine ( double x1 , double y1 , double z1 , double x2 , double y2 , double z2 , double x3 , double y3 , double z3 , double x4 , double y4 , double z4 ) { } }
double s = computeLineLineIntersectionFactor ( x1 , y1 , z1 , x2 , y2 , z2 , x3 , y3 , z3 , x4 , y4 , z4 ) ; return ! Double . isNaN ( s ) ;
public class ImmutableNumberStatistics {
    /**
     * Create an immutable copy of another NumberStatistics.
     *
     * @param statistics the original object
     * @return the immutable copy
     */
    public static <T extends Number> ImmutableNumberStatistics<T> copyOf(NumberStatistics<? extends T> statistics) {
        // Static factory delegating to the copy constructor.
        return new ImmutableNumberStatistics<T>(statistics);
    }
}
public class IntermediateModelBuilder { /** * Create default shape processors . */ private List < IntermediateModelShapeProcessor > createShapeProcessors ( ) { } }
final List < IntermediateModelShapeProcessor > processors = new ArrayList < > ( ) ; processors . add ( new AddInputShapes ( this ) ) ; processors . add ( new AddOutputShapes ( this ) ) ; processors . add ( new AddExceptionShapes ( this ) ) ; processors . add ( new AddModelShapes ( this ) ) ; processors . add ( new AddEmptyInputShape ( this ) ) ; processors . add ( new AddEmptyOutputShape ( this ) ) ; return processors ;
public class FmtNumber { /** * { @ inheritDoc } * @ throws SuperCsvCellProcessorException * if value is null or not a Number , or if an invalid decimalFormat String was supplied */ public Object execute ( final Object value , final CsvContext context ) { } }
validateInputNotNull ( value , context ) ; if ( ! ( value instanceof Number ) ) { throw new SuperCsvCellProcessorException ( Number . class , value , context , this ) ; } // create a new DecimalFormat if one is not supplied final DecimalFormat decimalFormatter ; try { decimalFormatter = formatter != null ? formatter : new DecimalFormat ( decimalFormat ) ; } catch ( IllegalArgumentException e ) { throw new SuperCsvCellProcessorException ( String . format ( "'%s' is not a valid decimal format" , decimalFormat ) , context , this , e ) ; } final String result = decimalFormatter . format ( value ) ; return next . execute ( result , context ) ;
public class ModifyVpcEndpointRequest { /** * ( Gateway endpoint ) One or more route tables IDs to associate with the endpoint . * @ return ( Gateway endpoint ) One or more route tables IDs to associate with the endpoint . */ public java . util . List < String > getAddRouteTableIds ( ) { } }
if ( addRouteTableIds == null ) { addRouteTableIds = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return addRouteTableIds ;
public class PropertyAccessors {
    /**
     * <p>isAnnotationPresentOnGetter</p>
     * Checks whether the given annotation is present on any of this accessor's getters.
     *
     * @param annotation a {@link java.lang.Class} object.
     * @param <T> the type
     * @return a boolean.
     */
    public <T extends Annotation> boolean isAnnotationPresentOnGetter(Class<T> annotation) {
        // Delegate to the shared utility over the cached getter methods.
        return CreatorUtils.isAnnotationPresent(annotation, getters);
    }
}
public class Proposal {
    /**
     * Sets the secondarySalespeople value for this Proposal.
     *
     * @param secondarySalespeople
     *        List of secondary salespeople who are responsible for the sales of
     *        the {@code Proposal} besides primary salesperson.
     *        This attribute is optional. A proposal could have 8 secondary salespeople
     *        at most, but must also have a {@link #primarySalesperson} if any secondary
     *        salesperson exists. This attribute can be configured as editable after the
     *        proposal has been submitted. Please check with your network administrator
     *        for editable fields configuration.
     *        <span class="constraint Applicable">This attribute is applicable when:
     *        <ul><li>using programmatic guaranteed, using sales management.</li>
     *        <li>not using programmatic, using sales management.</li></ul></span>
     */
    public void setSecondarySalespeople(com.google.api.ads.admanager.axis.v201902.SalespersonSplit[] secondarySalespeople) {
        // Plain generated setter: stores the array reference as-is (no defensive copy).
        this.secondarySalespeople = secondarySalespeople;
    }
}
public class RepairingNsStreamWriter {
    /**
     * Method called after {@link #findElemPrefix} has returned null,
     * to create and bind a namespace mapping for specified namespace.
     *
     * @param suggPrefix caller-suggested prefix, or null to consult the configured
     *        suggestions / generate a new prefix
     * @param nsURI the namespace URI to bind; null/empty means the empty namespace
     * @param elem the output element used to generate a fresh, unbound prefix
     * @return the prefix to bind (possibly "" for the default namespace)
     */
    protected final String generateElemPrefix(String suggPrefix, String nsURI, SimpleOutputElement elem) throws XMLStreamException {
        /* Ok... now, since we do not have an existing mapping, let's
         * see if we have a preferred prefix to use.
         */
        /* Except if we need the empty namespace... that can only be
         * bound to the empty prefix:
         */
        if (nsURI == null || nsURI.length() == 0) {
            return "";
        }
        /* Ok; with elements this is easy: the preferred prefix can
         * ALWAYS be used, since it can mask preceding bindings:
         */
        if (suggPrefix == null) {
            // caller wants this URI to map as the default namespace?
            if (mSuggestedDefNs != null && mSuggestedDefNs.equals(nsURI)) {
                suggPrefix = "";
            } else {
                suggPrefix = (mSuggestedPrefixes == null) ? null : mSuggestedPrefixes.get(nsURI);
                if (suggPrefix == null) {
                    /* 16-Oct-2005, TSa: We have 2 choices here, essentially;
                     * could make elements always try to override the def
                     * ns... or can just generate new one. Let's do latter
                     * for now.
                     */
                    // Lazily create the sequence counter used for auto-generated prefixes.
                    if (mAutoNsSeq == null) {
                        mAutoNsSeq = new int[1];
                        mAutoNsSeq[0] = 1;
                    }
                    suggPrefix = elem.generateMapping(mAutomaticNsPrefix, nsURI, mAutoNsSeq);
                }
            }
        }
        // Ok; let's let the caller deal with bindings
        return suggPrefix;
    }
}
public class ComponentFinder { /** * Gets the type repository used to analyse java classes . * @ return the type supplied type repository , or a default implementation */ public TypeRepository getTypeRepository ( ) { } }
if ( typeRepository == null ) { typeRepository = new DefaultTypeRepository ( getPackageNames ( ) , getExclusions ( ) , getUrlClassLoader ( ) ) ; } return typeRepository ;
public class MqttTopicPermission {
    /**
     * Checks if the topic implies a given MqttTopicPermissions topic.
     *
     * @param topic the topic to check
     * @param splitTopic the same topic pre-split into its levels
     * @return <code>true</code> if the given MqttTopicPermissions topic is implied by the current one
     */
    private boolean topicImplicity(final String topic, final String[] splitTopic) {
        try {
            // Delegate wildcard/level matching to the shared matcher using this
            // permission's precomputed flags (nonWildCard, endsWithWildCard, rootWildCard).
            return topicMatcher.matches(stripedTopic, this.splitTopic, nonWildCard, endsWithWildCard, rootWildCard, topic, splitTopic);
        } catch (InvalidTopicException e) {
            // A syntactically invalid topic can never be implied.
            return false;
        }
    }
}
public class HarborUtils {
    /**
     * Verifies that the given timeout is valid: it must be strictly positive.
     *
     * @param timeout the timeout to validate
     * @throws IllegalArgumentException if the timeout is zero or negative
     */
    public static void validateTimeout(Duration timeout) {
        if (timeout.isZero()) {
            String message = "The timeout must be nonzero";
            throw new IllegalArgumentException(message);
        }
        // Previously only an exactly-zero duration was rejected, so negative
        // timeouts slipped through validation; they are just as invalid.
        if (timeout.isNegative()) {
            throw new IllegalArgumentException("The timeout must not be negative");
        }
    }
}
public class AbstractFreeMarkerRenderer {
    /**
     * Processes the specified FreeMarker template with the specified request, data model and response.
     * Puts the page response contents into cache with the key getting from request attribute specified by
     * <i>page cache key</i>.
     *
     * @param html the specified HTML content
     * @param request the specified request
     * @param response the specified response
     * @throws Exception exception
     */
    protected void doRender(final String html, final HttpServletRequest request, final HttpServletResponse response) throws Exception {
        PrintWriter writer;
        try {
            writer = response.getWriter();
        } catch (final Exception e) {
            // getWriter() fails if getOutputStream() was already called on this
            // response; fall back to wrapping the stream in a writer.
            writer = new PrintWriter(response.getOutputStream());
        }
        try {
            if (response.isCommitted()) {
                // response has been sent redirect — nothing more to write
                writer.flush();
                writer.close();
                return;
            }
            writer.write(html);
            writer.flush();
            writer.close();
        } catch (final Exception e) {
            // Write failures (e.g. client disconnect) are logged, not propagated.
            LOGGER.log(Level.ERROR, "Writes pipe failed: " + e.getMessage());
        }
    }
}
public class MPD9AbstractReader { /** * Retrieve the details of a single project from the database . * @ param result Map instance containing the results * @ param row result set row read from the database */ protected void processProjectListItem ( Map < Integer , String > result , Row row ) { } }
Integer id = row . getInteger ( "PROJ_ID" ) ; String name = row . getString ( "PROJ_NAME" ) ; result . put ( id , name ) ;
public class JvmWildcardTypeReferenceImpl {
    /**
     * <!-- begin-user-doc -->
     * EMF-generated feature check: reports whether the CONSTRAINTS feature has been
     * set (non-null and non-empty); all other features defer to the superclass.
     * Do not hand-edit — regenerated code will overwrite changes.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case TypesPackage.JVM_WILDCARD_TYPE_REFERENCE__CONSTRAINTS:
                return constraints != null && !constraints.isEmpty();
        }
        return super.eIsSet(featureID);
    }
}
public class Model { /** * Find model by composite id values and load specific columns only . * < pre > * Example : * User user = User . dao . findByIdLoadColumns ( new Object [ ] { 123 , 456 } , " name , age " ) ; * < / pre > * @ param idValues the composite id values of the model * @ param columns the specific columns to load */ public M findByIdLoadColumns ( Object [ ] idValues , String columns ) { } }
Table table = _getTable ( ) ; if ( table . getPrimaryKey ( ) . length != idValues . length ) throw new IllegalArgumentException ( "id values error, need " + table . getPrimaryKey ( ) . length + " id value" ) ; Config config = _getConfig ( ) ; String sql = config . dialect . forModelFindById ( table , columns ) ; List < M > result = find ( config , sql , idValues ) ; return result . size ( ) > 0 ? result . get ( 0 ) : null ;
public class BoxRequestUpdateSharedItem {
    /**
     * Sets the date that this shared link will be deactivated. If this is set to null it will remove the
     * unshared at date that is set on this item.
     * Note: the date will be rounded to the day as the API does not support hours, minutes, or seconds.
     *
     * @param unsharedAt the date that this shared link will be deactivated.
     * @return the updated request
     * @throws ParseException thrown if date provided cannot be properly parsed.
     */
    public R setUnsharedAt(Date unsharedAt) throws ParseException {
        JsonObject jsonObject = getSharedLinkJsonObject();
        if (unsharedAt == null) {
            // Explicit JSON null clears the existing unshared-at date on the server.
            jsonObject.add(BoxSharedLink.FIELD_UNSHARED_AT, JsonValue.NULL);
        } else {
            jsonObject.add(BoxSharedLink.FIELD_UNSHARED_AT, BoxDateFormat.format(unsharedAt));
        }
        // Rewrap the modified shared-link JSON and stage it in the request body.
        BoxSharedLink sharedLink = new BoxSharedLink(jsonObject);
        mBodyMap.put(BoxItem.FIELD_SHARED_LINK, sharedLink);
        return (R) this;
    }
}
public class Proxy { /** * configures a proxy from properties ; if prefix is http or https you get standard system properties * https : / / docs . oracle . com / javase / 8 / docs / api / java / net / doc - files / net - properties . html * @ param prefix is usually the protocol - http or https */ public static Proxy forPropertiesOpt ( String prefix ) { } }
String host ; String port ; Proxy result ; host = System . getProperty ( prefix + ".proxyHost" ) ; if ( host == null ) { return null ; } port = System . getProperty ( prefix + ".proxyPort" ) ; if ( port == null ) { throw new IllegalStateException ( "missing proxy port for host " + host ) ; } result = new Proxy ( host , Integer . parseInt ( port ) ) ; result . excludes . addAll ( NON_PROXY_SEP . split ( System . getProperty ( prefix + ".nonProxyHosts" , "" ) ) ) ; return result ;
public class ReflectionUtils { /** * Checks if field or corresponding read method is annotated with given annotationType . * @ param field Field to check * @ param annotationType Annotation you ' re looking for . * @ return true if field or read method it annotated with given annotationType or false . */ public static boolean isAnnotationPresent ( Field field , Class < ? extends Annotation > annotationType ) { } }
final Optional < Method > readMethod = getReadMethod ( field ) ; return field . isAnnotationPresent ( annotationType ) || readMethod . isPresent ( ) && readMethod . get ( ) . isAnnotationPresent ( annotationType ) ;
public class CliClient {
    /**
     * Opens the interactive CLI shell: prints the welcome banner, then reads and
     * dispatches commands until EOF/kill or until {@code isRunning} is cleared.
     */
    public void open() {
        isRunning = true;
        // print welcome
        terminal.writer().append(CliStrings.MESSAGE_WELCOME);
        // begin reading loop
        while (isRunning) {
            // make some space to previous command
            terminal.writer().append("\n");
            terminal.flush();
            final String line;
            try {
                line = lineReader.readLine(prompt, null, (MaskingCallback) null, null);
            } catch (UserInterruptException e) {
                // user cancelled line with Ctrl+C — discard the line, keep the shell alive
                continue;
            } catch (EndOfFileException | IOError e) {
                // user cancelled application with Ctrl+D or kill — leave the loop
                break;
            } catch (Throwable t) {
                throw new SqlClientException("Could not read from command line.", t);
            }
            if (line == null) {
                continue;
            }
            // Parse and, when the input is a recognized command, execute it.
            final Optional<SqlCommandCall> cmdCall = parseCommand(line);
            cmdCall.ifPresent(this::callCommand);
        }
    }
}
public class UserData {
    /**
     * Sets userData for user; data is set in cache and sent to user.
     *
     * @param key Name of value
     * @param value Data to be set
     */
    public void set(String key, String value) {
        cache.put(key, value);
        // Push the key/value pair to the client as a JSON payload; both strings are
        // escaped to keep the hand-built JSON well-formed.
        user.sendGlobal("JWWF-storageSet", "{\"key\":" + Json.escapeString(key) + ",\"value\":" + Json.escapeString(value) + "}");
    }
}
public class ZMatrixReader {
    /**
     * Private method that actually parses the input to read a ChemFile object.
     * Reads a single Z-matrix frame: an atom count line, a title line, then one
     * line per atom giving element plus (distance / angle / dihedral) references,
     * and finally converts the internal coordinates to cartesian positions.
     * NOTE(review): despite the commented-out frame loop below, only the FIRST
     * frame is parsed; on IOException the method returns null.
     *
     * @param file the file to read from
     * @return A ChemFile containing the data parsed from input (or null on I/O error).
     */
    private IChemFile readChemFile(IChemFile file) {
        IChemSequence chemSequence = file.getBuilder().newInstance(IChemSequence.class);
        int number_of_atoms;
        StringTokenizer tokenizer;
        try {
            String line = input.readLine();
            // Skip leading comment lines.
            while (line.startsWith("#"))
                line = input.readLine();
            /*
             * while (input.ready() && line != null) {
             */
            // logger.debug("lauf");
            // parse frame by frame
            tokenizer = new StringTokenizer(line, "\t ,;");
            String token = tokenizer.nextToken();
            number_of_atoms = Integer.parseInt(token);
            String info = input.readLine();
            IChemModel chemModel = file.getBuilder().newInstance(IChemModel.class);
            IAtomContainerSet setOfMolecules = file.getBuilder().newInstance(IAtomContainerSet.class);
            IAtomContainer m = file.getBuilder().newInstance(IAtomContainer.class);
            m.setTitle(info);
            // Per-atom internal coordinates; reference indices are stored 0-based
            // (input is 1-based) and default to -1 / 0.0 when absent.
            String[] types = new String[number_of_atoms];
            double[] d = new double[number_of_atoms];
            int[] d_atom = new int[number_of_atoms]; // Distances
            double[] a = new double[number_of_atoms];
            int[] a_atom = new int[number_of_atoms]; // Angles
            double[] da = new double[number_of_atoms];
            int[] da_atom = new int[number_of_atoms]; // Dihedral angles
            // Point3d[] pos = new Point3d[number_of_atoms]; // calculated positions
            int i = 0;
            while (i < number_of_atoms) {
                line = input.readLine();
                // logger.debug("line: \"" + line + "\"");
                if (line == null) break;
                if (line.startsWith("#")) {
                    // skip comment in file
                } else {
                    d[i] = 0d;
                    d_atom[i] = -1;
                    a[i] = 0d;
                    a_atom[i] = -1;
                    da[i] = 0d;
                    da_atom[i] = -1;
                    tokenizer = new StringTokenizer(line, "\t ,;");
                    int fields = tokenizer.countTokens();
                    // Atom i needs min(i*2+1, 7) fields: the first three atoms carry
                    // progressively fewer internal coordinates.
                    if (fields < Math.min(i * 2 + 1, 7)) {
                        // this is an error but cannot throw exception — the line is skipped
                    } else if (i == 0) {
                        // First atom: element symbol only.
                        types[i] = tokenizer.nextToken();
                        i++;
                    } else if (i == 1) {
                        // Second atom: element, distance reference, distance.
                        types[i] = tokenizer.nextToken();
                        d_atom[i] = Integer.valueOf(tokenizer.nextToken()) - 1;
                        d[i] = Double.valueOf(tokenizer.nextToken());
                        i++;
                    } else if (i == 2) {
                        // Third atom: adds an angle reference and angle.
                        types[i] = tokenizer.nextToken();
                        d_atom[i] = Integer.valueOf(tokenizer.nextToken()) - 1;
                        d[i] = Double.valueOf(tokenizer.nextToken());
                        a_atom[i] = Integer.valueOf(tokenizer.nextToken()) - 1;
                        a[i] = Double.valueOf(tokenizer.nextToken());
                        i++;
                    } else {
                        // Remaining atoms: adds a dihedral reference and dihedral angle.
                        types[i] = tokenizer.nextToken();
                        d_atom[i] = Integer.valueOf(tokenizer.nextToken()) - 1;
                        d[i] = Double.valueOf(tokenizer.nextToken());
                        a_atom[i] = Integer.valueOf(tokenizer.nextToken()) - 1;
                        a[i] = Double.valueOf(tokenizer.nextToken());
                        da_atom[i] = Integer.valueOf(tokenizer.nextToken()) - 1;
                        da[i] = Double.valueOf(tokenizer.nextToken());
                        i++;
                    }
                }
            }
            // calculate cartesian coordinates
            Point3d[] cartCoords = ZMatrixTools.zmatrixToCartesian(d, d_atom, a, a_atom, da, da_atom);
            for (i = 0; i < number_of_atoms; i++) {
                m.addAtom(file.getBuilder().newInstance(IAtom.class, types[i], cartCoords[i]));
            }
            // logger.debug("molecule: " + m);
            setOfMolecules.addAtomContainer(m);
            chemModel.setMoleculeSet(setOfMolecules);
            chemSequence.addChemModel(chemModel);
            line = input.readLine();
            file.addChemSequence(chemSequence);
        } catch (IOException e) {
            // should make some noise now — I/O failure is silently turned into null
            file = null;
        }
        return file;
    }
}
public class LongDoubleHashMap { /** * Compute the capacity needed for a given size . * @ param expectedSize expected size of the map * @ return capacity to use for the specified size */ private static int computeCapacity ( final int expectedSize ) { } }
if ( expectedSize == 0 ) { return 1 ; } final int capacity = ( int ) InternalFastMath . ceil ( expectedSize / LOAD_FACTOR ) ; final int powerOfTwo = Integer . highestOneBit ( capacity ) ; if ( powerOfTwo == capacity ) { return capacity ; } return nextPowerOfTwo ( capacity ) ;
public class AmbiguityLibrary {
    /**
     * Load (or reload) the ambiguity dictionary for the given cache key.
     * Each non-blank line is tab-separated with an even number of fields; the
     * even-indexed fields are concatenated to form the branch key.
     *
     * @param key cache key, removed from AMBIGUITY on load failure
     * @param kv holder of the dictionary path (K) and the cached forest (V)
     * @param reload when true, an existing forest is cleared and rebuilt
     * @return the loaded forest, or null when loading fails
     */
    private static synchronized Forest init(String key, KV<String, Forest> kv, boolean reload) {
        Forest forest = kv.getV();
        if (forest != null) {
            if (reload) {
                // Reuse the cached instance but drop its contents before reloading.
                forest.clear();
            } else {
                // Already loaded and no reload requested — return the cached forest.
                return forest;
            }
        } else {
            forest = new Forest();
        }
        try (BufferedReader br = IOUtil.getReader(PathToStream.stream(kv.getK()), "utf-8")) {
            String temp;
            LOG.debug("begin init ambiguity");
            long start = System.currentTimeMillis();
            while ((temp = br.readLine()) != null) {
                if (StringUtil.isNotBlank(temp)) {
                    temp = StringUtil.trim(temp);
                    String[] split = temp.split("\t");
                    StringBuilder sb = new StringBuilder();
                    // Lines must hold word/tag pairs; odd field counts are malformed.
                    if (split.length % 2 != 0) {
                        LOG.error("init ambiguity error in line :" + temp + " format err !");
                        continue;
                    }
                    for (int i = 0; i < split.length; i += 2) {
                        sb.append(split[i]);
                    }
                    forest.addBranch(sb.toString(), split);
                }
            }
            LOG.info("load dic use time:" + (System.currentTimeMillis() - start) + " path is : " + kv.getK());
            kv.setV(forest);
            return forest;
        } catch (Exception e) {
            // On any failure, drop the cache entry so a later call can retry.
            LOG.error("Init ambiguity library error :" + e.getMessage() + ", path: " + kv.getK());
            AMBIGUITY.remove(key);
            return null;
        }
    }
}
public class ClientCallbackHandler {
    /**
     * This method is invoked by SASL for authentication challenges.
     * Name/password callbacks are only logged (password entry is unsupported —
     * a detailed warning tells the operator how to refresh the Kerberos TGT);
     * authorization callbacks are granted iff authentication and authorization
     * IDs match.
     *
     * @param callbacks a collection of challenge callbacks
     * @throws UnsupportedCallbackException for any callback type not handled here
     */
    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
        for (Callback c : callbacks) {
            if (c instanceof NameCallback) {
                LOG.debug("name callback");
            } else if (c instanceof PasswordCallback) {
                LOG.debug("password callback");
                LOG.warn("Could not login: the client is being asked for a password, but the " + " client code does not currently support obtaining a password from the user." + " Make sure that the client is configured to use a ticket cache (using" + " the JAAS configuration setting 'useTicketCache=true)' and restart the client. If" + " you still get this message after that, the TGT in the ticket cache has expired and must" + " be manually refreshed. To do so, first determine if you are using a password or a" + " keytab. If the former, run kinit in a Unix shell in the environment of the user who" + " is running this client using the command" + " 'kinit <princ>' (where <princ> is the name of the client's Kerberos principal)." + " If the latter, do" + " 'kinit -k -t <keytab> <princ>' (where <princ> is the name of the Kerberos principal, and" + " <keytab> is the location of the keytab file). After manually refreshing your cache," + " restart this client. If you continue to see this message after manually refreshing" + " your cache, ensure that your KDC host's clock is in sync with this host's clock." );
            } else if (c instanceof AuthorizeCallback) {
                LOG.debug("authorization callback");
                AuthorizeCallback ac = (AuthorizeCallback) c;
                String authid = ac.getAuthenticationID();
                String authzid = ac.getAuthorizationID();
                // Authorize only when the authenticated identity is the one being
                // authorized (no impersonation support).
                if (authid.equals(authzid)) {
                    ac.setAuthorized(true);
                } else {
                    ac.setAuthorized(false);
                }
                if (ac.isAuthorized()) {
                    ac.setAuthorizedID(authzid);
                }
            } else {
                throw new UnsupportedCallbackException(c);
            }
        }
    }
}
public class ValueEnforcer { /** * Check if * < code > nValue & gt ; nLowerBoundInclusive & amp ; & amp ; nValue & lt ; nUpperBoundInclusive < / code > * @ param aValue * Value * @ param aName * Name * @ param aLowerBoundExclusive * Lower bound * @ param aUpperBoundExclusive * Upper bound * @ return The value */ public static BigDecimal isBetweenExclusive ( final BigDecimal aValue , @ Nonnull final Supplier < ? extends String > aName , @ Nonnull final BigDecimal aLowerBoundExclusive , @ Nonnull final BigDecimal aUpperBoundExclusive ) { } }
notNull ( aValue , aName ) ; notNull ( aLowerBoundExclusive , "LowerBoundInclusive" ) ; notNull ( aUpperBoundExclusive , "UpperBoundInclusive" ) ; if ( isEnabled ( ) ) if ( aValue . compareTo ( aLowerBoundExclusive ) <= 0 || aValue . compareTo ( aUpperBoundExclusive ) >= 0 ) throw new IllegalArgumentException ( "The value of '" + aName . get ( ) + "' must be > " + aLowerBoundExclusive + " and < " + aUpperBoundExclusive + "! The current value is: " + aValue ) ; return aValue ;
public class StaticTypeCheckingSupport {
    /**
     * Given a generics type representing SomeClass&lt;T,V&gt; and a resolved placeholder map, returns a new generics type
     * for which placeholders are resolved recursively.
     *
     * @param gt the generics type to resolve (may itself be a placeholder)
     * @param placeholders mapping from placeholder names to their resolved generics types
     * @return a new generics type with type, lower bound and upper bounds fully resolved
     */
    protected static GenericsType fullyResolve(GenericsType gt, Map<GenericsTypeName, GenericsType> placeholders) {
        // If gt is itself a placeholder with a known resolution, substitute it first.
        GenericsType fromMap = placeholders.get(new GenericsTypeName(gt.getName()));
        if (gt.isPlaceholder() && fromMap != null) {
            gt = fromMap;
        }
        // Resolve the main type and each bound recursively.
        ClassNode type = fullyResolveType(gt.getType(), placeholders);
        ClassNode lowerBound = gt.getLowerBound();
        if (lowerBound != null) lowerBound = fullyResolveType(lowerBound, placeholders);
        ClassNode[] upperBounds = gt.getUpperBounds();
        if (upperBounds != null) {
            // Work on a copy so the original bounds array is never mutated.
            ClassNode[] copy = new ClassNode[upperBounds.length];
            for (int i = 0, upperBoundsLength = upperBounds.length; i < upperBoundsLength; i++) {
                final ClassNode upperBound = upperBounds[i];
                copy[i] = fullyResolveType(upperBound, placeholders);
            }
            upperBounds = copy;
        }
        GenericsType genericsType = new GenericsType(type, upperBounds, lowerBound);
        // Preserve wildcard-ness of the original generics type.
        genericsType.setWildcard(gt.isWildcard());
        return genericsType;
    }
}
public class AmazonEC2Client {
    /**
     * Stops advertising an IPv4 address range that is provisioned as an address pool.
     * You can perform this operation at most once every 10 seconds, even if you specify different address ranges each
     * time.
     * It can take a few minutes before traffic to the specified addresses stops routing to AWS because of BGP
     * propagation delays.
     *
     * @param withdrawByoipCidrRequest
     * @return Result of the WithdrawByoipCidr operation returned by the service.
     * @sample AmazonEC2.WithdrawByoipCidr
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/WithdrawByoipCidr" target="_top">AWS API
     *      Documentation</a>
     */
    @Override
    public WithdrawByoipCidrResult withdrawByoipCidr(WithdrawByoipCidrRequest request) {
        // Run request handlers/customizations, then delegate to the generated executor.
        request = beforeClientExecution(request);
        return executeWithdrawByoipCidr(request);
    }
}
public class HistoryController {
    /**
     * Retrieve the history for a profile.
     *
     * @param mode Spring model (unused here)
     * @param profileIdentifier profile id or friendly name, resolved to a numeric id
     * @param clientUUID client to scope the history to
     * @param offset pagination offset; when 0 it is derived from page/rows
     * @param limit maximum rows to return (-1 means unlimited)
     * @param sourceURIFilters optional source-URI filters
     * @param page jqGrid page number (1-based)
     * @param rows jqGrid page size; overrides limit when not -1
     * @param hasMessage restrict to entries that have a message
     * @return jqGrid-shaped JSON map of history entries
     * @throws Exception
     */
    @RequestMapping(value = "/api/history/{profileIdentifier}", method = RequestMethod.GET)
    public @ResponseBody HashMap<String, Object> getHistory(Model mode, @PathVariable String profileIdentifier, @RequestParam(value = "clientUUID", defaultValue = Constants.PROFILE_CLIENT_DEFAULT_ID) String clientUUID, @RequestParam(value = "offset", defaultValue = "0") int offset, @RequestParam(value = "limit", defaultValue = "-1") int limit, @RequestParam(value = "source_uri[]", required = false) String[] sourceURIFilters, @RequestParam(value = "page", defaultValue = "1") int page, @RequestParam(value = "rows", defaultValue = "-1") int rows, @RequestParam(value = "hasMessage", defaultValue = "false") boolean hasMessage) throws Exception {
        Integer profileId = ControllerUtils.convertProfileIdentifier(profileIdentifier);
        HashMap<String, String[]> filters = new HashMap<String, String[]>();
        if (sourceURIFilters != null) {
            filters.put(Constants.HISTORY_FILTER_SOURCE_URI, sourceURIFilters);
        }
        // rows exists because jqgrid uses it.. but limit is more common
        // set limit to rows if it was set
        if (rows != -1) {
            limit = rows;
        }
        // offset is #of page (-1) * rows if not passed in
        // NOTE(review): when rows is -1 and page > 1 this yields a negative offset;
        // presumably the service treats that as 0 — confirm.
        if (offset == 0) {
            offset = (page - 1) * rows;
        }
        History[] histories = HistoryService.getInstance().getHistory(profileId, clientUUID, offset, limit, false, filters, hasMessage);
        int totalRows = HistoryService.getInstance().getHistoryCount(profileId, clientUUID, filters);
        HashMap<String, Object> returnJSON = Utils.getJQGridJSON(histories, "history", offset, totalRows, limit);
        return returnJSON;
    }
}
public class ThemeManager {
    /**
     * Only for system use.
     * Starts an activity, propagating the parent theme/color scheme into the intent
     * and using the most capable start mechanism available for the context/API level.
     */
    @SuppressLint("NewApi")
    public static void startActivity(Context context, Intent intent, int requestCode, Bundle options) {
        final Activity activity = context instanceof Activity ? (Activity) context : null;
        if (activity != null && HoloEverywhere.ALWAYS_USE_PARENT_THEME) {
            ThemeManager.cloneTheme(activity.getIntent(), intent, true);
        }
        // NOTE(review): activity may be null here when context is not an Activity;
        // presumably getThemeType(null) returns INVALID — confirm.
        final int parentColorScheme = ThemeManager.getThemeType(activity);
        if (parentColorScheme != INVALID) {
            intent.putExtra(_PARENT_SCHEME_TAG, parentColorScheme);
        }
        if (context instanceof SuperStartActivity) {
            // Host handles its own startActivity (e.g. for result forwarding).
            ((SuperStartActivity) context).superStartActivity(intent, requestCode, options);
        } else if (VERSION.SDK_INT >= 16) {
            // API 16+ supports passing ActivityOptions.
            context.startActivity(intent, options);
        } else {
            context.startActivity(intent);
        }
    }
}
public class ClientConfig { /** * Users can overload this method to define in which scenarios a request should result * in an ' intercepted ' page with proper windowId detection . This can e . g . contain * blacklisting some userAgents . * By default the following User - Agents will be served directly : * < ul > * < li > . * bot . * < / li > * < li > . * Bot . * < / li > * < li > . * Slurp . * < / li > * < li > . * Crawler . * < / li > * < / ul > * @ return < code > true < / code > if the Request should get ' intercepted ' and the intermediate * windowhandler . html page should get rendered first . By returning < code > false < / code > * the requested page will get rendered intermediately . * @ see # getUserAgent ( javax . faces . context . FacesContext ) for determining the UserAgent */ public boolean isClientSideWindowHandlerRequest ( FacesContext facesContext ) { } }
if ( ! isJavaScriptEnabled ( ) ) { return false ; } String userAgent = getUserAgent ( facesContext ) ; if ( userAgent != null && ( userAgent . indexOf ( "bot" ) >= 0 || // Googlebot , etc userAgent . indexOf ( "Bot" ) >= 0 || // BingBot , etc userAgent . indexOf ( "Slurp" ) >= 0 || // Yahoo Slurp userAgent . indexOf ( "Crawler" ) >= 0 // various other Crawlers ) ) { return false ; } return true ;
public class HTODInvalidationBuffer {
    /**
     * Checks whether the "full" condition is met so that LPBT should start.
     * The buffer counts as full when the combined size of the explicit, scan
     * and garbage-collector buffers exceeds the configured maximum, or when
     * too much time has elapsed since the last removal. When full, the
     * last-remove timestamp is refreshed as a side effect.
     *
     * @return {@code true} when the "full" condition holds
     */
    @Trivial
    public synchronized boolean isFull() {
        final int pending = this.explicitBuffer.size()
                + this.scanBuffer.size()
                + this.garbageCollectorBuffer.size();
        final long idleMillis = System.currentTimeMillis() - this.lastRemoveTime;
        if (pending <= this.maxInvalidationBufferSize && idleMillis < this.maxInvalidationBufferLife) {
            return false;
        }
        setlastRemoveTime();
        return true;
    }
}
public class PropertyDescription { /** * Returns the property ' s type */ public TypeDescription getType ( ) { } }
if ( mType == null ) { mType = getTeaToolsUtils ( ) . createTypeDescription ( getPropertyDescriptor ( ) . getPropertyType ( ) ) ; } return mType ;
public class DeviceiSCSIAttributesMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeviceiSCSIAttributes deviceiSCSIAttributes , ProtocolMarshaller protocolMarshaller ) { } }
if ( deviceiSCSIAttributes == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deviceiSCSIAttributes . getTargetARN ( ) , TARGETARN_BINDING ) ; protocolMarshaller . marshall ( deviceiSCSIAttributes . getNetworkInterfaceId ( ) , NETWORKINTERFACEID_BINDING ) ; protocolMarshaller . marshall ( deviceiSCSIAttributes . getNetworkInterfacePort ( ) , NETWORKINTERFACEPORT_BINDING ) ; protocolMarshaller . marshall ( deviceiSCSIAttributes . getChapEnabled ( ) , CHAPENABLED_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class HermesCommandLineApp {
    /**
     * Creates a corpus based on the command line parameters.
     * Builds it from the instance fields {@code distributed}, {@code inputFormat}
     * and {@code input}, which are presumably populated from parsed CLI options
     * before this is called — TODO confirm against the option-parsing code.
     *
     * @return the corpus
     */
    public Corpus getCorpus() {
        return Corpus.builder().distributed(distributed).source(inputFormat, input).build();
    }
}
public class ClassLoaderResourceUtils {
    /**
     * Checks whether a class with the given name can be loaded from the
     * current classpath.
     *
     * @param classname the fully qualified name of the class to be checked
     * @return true if the class is present and loadable
     */
    public static boolean isClassPresent(String classname) {
        try {
            Class.forName(classname);
            return true;
        } catch (ClassNotFoundException e) {
            // Expected outcome of a presence check — not an error.
            return false;
        } catch (LinkageError e) {
            // Robustness fix: the class file exists but cannot be linked
            // (e.g. a missing dependency); it is not usable, so report absent
            // instead of letting the error propagate out of a boolean probe.
            return false;
        }
    }
}
public class DescribeStacksResult { /** * Information about the stacks . * @ param stacks * Information about the stacks . */ public void setStacks ( java . util . Collection < Stack > stacks ) { } }
if ( stacks == null ) { this . stacks = null ; return ; } this . stacks = new java . util . ArrayList < Stack > ( stacks ) ;
public class Money { /** * Obtains an instance of { @ code Money } as the total value of an array . * The array must contain at least one monetary value . * Subsequent amounts are added as though using { @ link # plus ( Money ) } . * All amounts must be in the same currency . * @ param monies the monetary values to total , not empty , no null elements , not null * @ return the total , never null * @ throws IllegalArgumentException if the array is empty * @ throws CurrencyMismatchException if the currencies differ */ public static Money total ( Money ... monies ) { } }
MoneyUtils . checkNotNull ( monies , "Money array must not be null" ) ; if ( monies . length == 0 ) { throw new IllegalArgumentException ( "Money array must not be empty" ) ; } Money total = monies [ 0 ] ; MoneyUtils . checkNotNull ( total , "Money arary must not contain null entries" ) ; for ( int i = 1 ; i < monies . length ; i ++ ) { total = total . plus ( monies [ i ] ) ; } return total ;
public class DruidCoordinatorRuntimeParams { /** * Creates a TreeSet sorted in { @ link DruidCoordinator # SEGMENT _ COMPARATOR _ RECENT _ FIRST } order and populates it with * the segments from the given iterable . The given iterable is iterated exactly once . No special action is taken if * duplicate segments are encountered in the iterable . */ public static TreeSet < DataSegment > createAvailableSegmentsSet ( Iterable < DataSegment > availableSegments ) { } }
TreeSet < DataSegment > segmentsSet = new TreeSet < > ( DruidCoordinator . SEGMENT_COMPARATOR_RECENT_FIRST ) ; availableSegments . forEach ( segmentsSet :: add ) ; return segmentsSet ;
public class StandardBullhornData { /** * Makes the " search " api call with POST instead of GET * HTTP Method : POST * @ param type the BullhornEntity type * @ param query Lucene query string * @ param fieldSet the fields to return , if null or empty will default to " * " all * @ param params optional SearchParams . * @ return a LinsWrapper containing the records plus some additional information */ protected < L extends ListWrapper < T > , T extends SearchEntity > L handleSearchForEntitiesWithPost ( Class < T > type , String query , Set < String > fieldSet , SearchParams params ) { } }
Map < String , String > uriVariables = restUriVariablesFactory . getUriVariablesForSearchWithPost ( BullhornEntityInfo . getTypesRestEntityName ( type ) , fieldSet , params ) ; String url = restUrlFactory . assembleSearchUrlWithPost ( params ) ; if ( Candidate . class == type ) { url = url + "&useV2=true" ; } JSONObject body = new JSONObject ( ) ; body . put ( "query" , query ) ; return ( L ) this . performPostRequest ( url , body . toString ( ) , BullhornEntityInfo . getTypesListWrapperType ( type ) , uriVariables ) ;
public class JobTemplateSettings { /** * Use Inputs ( inputs ) to define the source file used in the transcode job . There can only be one input in a job * template . Using the API , you can include multiple inputs when referencing a job template . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setInputs ( java . util . Collection ) } or { @ link # withInputs ( java . util . Collection ) } if you want to override the * existing values . * @ param inputs * Use Inputs ( inputs ) to define the source file used in the transcode job . There can only be one input in a * job template . Using the API , you can include multiple inputs when referencing a job template . * @ return Returns a reference to this object so that method calls can be chained together . */ public JobTemplateSettings withInputs ( InputTemplate ... inputs ) { } }
if ( this . inputs == null ) { setInputs ( new java . util . ArrayList < InputTemplate > ( inputs . length ) ) ; } for ( InputTemplate ele : inputs ) { this . inputs . add ( ele ) ; } return this ;
public class ClassDocImpl {
    /**
     * Find constructor in this class.
     *
     * @param constrName the unqualified name to search for.
     * @param paramTypes the array of Strings for constructor parameters.
     * @return the first ConstructorDocImpl which matches, null if not found.
     */
    public ConstructorDoc findConstructor(String constrName, String[] paramTypes) {
        // Constructors live in the class's member scope under the synthetic
        // name "<init>"; constrName itself is not used for the lookup.
        Names names = tsym.name.table.names;
        for (Symbol sym : tsym.members().getSymbolsByName(names.fromString("<init>"))) {
            // Only method symbols qualify (skips fields/inner classes of the same name).
            if (sym.kind == MTH) {
                if (hasParameterTypes((MethodSymbol) sym, paramTypes)) {
                    return env.getConstructorDoc((MethodSymbol) sym);
                }
            }
        }
        // ### (gj) As a temporary measure until type variables are better
        // ### handled, try again without the parameter types.
        // ### This will often find the right constructor, and occassionally
        // ### find the wrong one.
        // if (paramTypes != null) {
        //     return findConstructor(constrName, null);
        // NOTE(review): the fallback above is intentionally disabled; no
        // matching constructor yields null.
        return null;
    }
}
public class MemoryFileSystem { /** * { @ inheritDoc } */ public synchronized void remove ( Entry entry ) { } }
List < Entry > entries ; if ( entry == null ) { return ; } DirectoryEntry parent = entry . getParent ( ) ; if ( parent == null ) { return ; } else { entries = contents . get ( parent ) ; if ( entries == null ) { return ; } } for ( Iterator < Entry > i = entries . iterator ( ) ; i . hasNext ( ) ; ) { Entry e = ( Entry ) i . next ( ) ; if ( entry . equals ( e ) ) { if ( e instanceof DirectoryEntry ) { Entry [ ] children = listEntries ( ( DirectoryEntry ) e ) ; for ( int j = children . length - 1 ; j >= 0 ; j -- ) { remove ( children [ j ] ) ; } contents . remove ( e ) ; } i . remove ( ) ; return ; } }
public class Collectors { /** * Returns a { @ code Collector } that filters input elements . * @ param < T > the type of the input elements * @ param < A > the accumulation type * @ param < R > the type of the output elements * @ param predicate a predicate used to filter elements * @ param downstream the collector of filtered elements * @ return a { @ code Collector } * @ since 1.1.3 */ @ NotNull public static < T , A , R > Collector < T , ? , R > filtering ( @ NotNull final Predicate < ? super T > predicate , @ NotNull final Collector < ? super T , A , R > downstream ) { } }
final BiConsumer < A , ? super T > accumulator = downstream . accumulator ( ) ; return new CollectorsImpl < T , A , R > ( downstream . supplier ( ) , new BiConsumer < A , T > ( ) { @ Override public void accept ( A a , T t ) { if ( predicate . test ( t ) ) accumulator . accept ( a , t ) ; } } , downstream . finisher ( ) ) ;
public class GAEBlobServlet { /** * Encode header value for Content - Disposition */ private static String getEncodeFileName ( String userAgent , String fileName ) { } }
String encodedFileName = fileName ; try { if ( userAgent . contains ( "MSIE" ) || userAgent . contains ( "Opera" ) ) { encodedFileName = URLEncoder . encode ( fileName , "UTF-8" ) ; } else { encodedFileName = "=?UTF-8?B?" + new String ( BaseEncoding . base64 ( ) . encode ( fileName . getBytes ( "UTF-8" ) ) ) + "?=" ; } } catch ( Exception e ) { LOGGER . error ( e . getMessage ( ) ) ; } return encodedFileName ;
public class EhCacheProvider { /** * Load resource . * @ param configurationResourceName * the configuration resource name * @ return the uRL */ private URL loadResource ( String configurationResourceName ) { } }
ClassLoader standardClassloader = ClassLoaderUtil . getStandardClassLoader ( ) ; URL url = null ; if ( standardClassloader != null ) { url = standardClassloader . getResource ( configurationResourceName ) ; } if ( url == null ) { url = this . getClass ( ) . getResource ( configurationResourceName ) ; } log . info ( "Creating EhCacheFactory from a specified resource: " + configurationResourceName + " Resolved to URL: " + url ) ; if ( url == null ) { log . warn ( "A configurationResourceName was set to {} but the resource could not be loaded from the classpath.Ehcache will configure itself using defaults." , configurationResourceName ) ; } return url ;
public class JNDIServiceBinder { /** * { @ inheritDoc } */ @ Override public void serviceChanged ( ServiceEvent event ) { } }
ServiceReference < ? > ref = event . getServiceReference ( ) ; switch ( event . getType ( ) ) { case ServiceEvent . REGISTERED : recordEntry ( ref ) ; break ; case ServiceEvent . MODIFIED : moveEntry ( ref ) ; break ; case ServiceEvent . MODIFIED_ENDMATCH : case ServiceEvent . UNREGISTERING : removeEntry ( ref ) ; break ; default : break ; }
public class Money {
    /**
     * <pre>
     * The 3-letter currency code defined in ISO 4217.
     * </pre>
     * <code>string currency_code = 1;</code>
     */
    public java.lang.String getCurrencyCode() {
        // Standard protobuf-generated lazy-decode pattern: the field holds
        // either a decoded String or the raw ByteString from the wire.
        java.lang.Object ref = currencyCode_;
        if (ref instanceof java.lang.String) {
            return (java.lang.String) ref;
        } else {
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            // Cache the decoded String so subsequent calls skip the UTF-8 decode.
            currencyCode_ = s;
            return s;
        }
    }
}
public class GwtFaceExampleStandalone {
    /**
     * CHECKSTYLE VISIBILITY MODIFIER: ON
     * GWT entry point: registers the showcase sample tree nodes, builds the
     * example layout, wires up authentication/token handling for the security
     * demo, and performs an initial login.
     */
    @Override
    public void onModuleLoad() {
        SampleTree.setTreeTitle(MESSAGES.treeTitle());
        // layer samples
        SampleTreeNodeRegistry.addSampleTreeNode(new SampleTreeNode(MESSAGES.openCycleMapTitle(),
                "[ISOMORPHIC]/geomajas/osgeo/layer-raster.png", OpenCycleMapSample.TITLE, "Layers",
                OpenCycleMapSample.FACTORY));
        SampleTreeNodeRegistry.addSampleTreeNode(new SampleTreeNode(MESSAGES.tmsTitle(),
                "[ISOMORPHIC]/geomajas/osgeo/layer-raster.png", TmsSample.TITLE, "Layers", TmsSample.FACTORY));
        // Security samples: a "topLevel" group node first, then its children.
        SampleTreeNodeRegistry.addSampleTreeNode(new SampleTreeNode(MESSAGES.treeGroupSecurity(),
                "[ISOMORPHIC]/geomajas/silk/key.png", SECURITY_GROUP, "topLevel"));
        SampleTreeNodeRegistry.addSampleTreeNode(new SampleTreeNode(MESSAGES.layerSecurityTitle(),
                "[ISOMORPHIC]/geomajas/staticsecurity/key_go.png", LayerSecuritySample.LAYER_SECURITY_TITLE,
                SECURITY_GROUP, LayerSecuritySample.FACTORY));
        SampleTreeNodeRegistry.addSampleTreeNode(new SampleTreeNode(MESSAGES.filterSecurityTitle(),
                "[ISOMORPHIC]/geomajas/staticsecurity/key_go.png", FilterSecuritySample.TITLE,
                SECURITY_GROUP, FilterSecuritySample.FACTORY));
        SampleTreeNodeRegistry.addSampleTreeNode(new SampleTreeNode(MESSAGES.attributeSecurityTitle(),
                "[ISOMORPHIC]/geomajas/staticsecurity/key_go.png", AttributeSecuritySample.TITLE,
                SECURITY_GROUP, AttributeSecuritySample.FACTORY));
        SampleTreeNodeRegistry.addSampleTreeNode(new SampleTreeNode(MESSAGES.commandSecurityTitle(),
                "[ISOMORPHIC]/geomajas/staticsecurity/key_go.png", CommandSecuritySample.TITLE,
                SECURITY_GROUP, CommandSecuritySample.FACTORY));
        SampleTreeNodeRegistry.addSampleTreeNode(new SampleTreeNode(MESSAGES.toolSecurityTitle(),
                "[ISOMORPHIC]/geomajas/staticsecurity/key_go.png", ToolSecuritySample.TITLE,
                SECURITY_GROUP, ToolSecuritySample.FACTORY));
        ExampleLayout exampleLayout = new ExampleLayout();
        exampleLayout.setAuthenticationHandler(new ShowcaseAuthenticationHandler());
        exampleLayout.buildUi();
        // security demo: keep the user label in sync with the current token.
        final Label userLabel = exampleLayout.getUserLabel();
        GwtCommandDispatcher dispatcher = GwtCommandDispatcher.getInstance();
        dispatcher.setTokenRequestHandler(new ShowcaseTokenRequestHandler());
        dispatcher.addTokenChangedHandler(new TokenChangedHandler() {
            public void onTokenChanged(TokenChangedEvent event) {
                String userId = null;
                if (null != event.getUserDetail()) {
                    userId = event.getUserDetail().getUserId();
                }
                if (null == userId) {
                    userLabel.setContents("No user is logged in.");
                } else {
                    userLabel.setContents("Logged in with: " + userId);
                    // Run (once) any action deferred until a successful login.
                    if (null != runOnLogin) {
                        runOnLogin.run();
                        runOnLogin = null;
                    }
                }
            }
        });
        // Demo credentials for the static-security showcase.
        SsecAccess.login("luc", "luc", null);
    }
}
public class ZookeeperRegistry {
    /**
     * Registers the provider's service URLs in ZooKeeper.
     * Each URL becomes a child node under the provider path; nodes are
     * created as ephemeral or persistent depending on configuration, and an
     * already-existing node is tolerated with a warning. On success a
     * {@code ProviderPubEvent} is posted if that event type is enabled.
     *
     * @param config the provider configuration to publish
     * @throws SofaRpcRuntimeException if registration fails
     */
    protected void registerProviderUrls(ProviderConfig config) {
        String appName = config.getAppName();
        // Register the provider-side nodes.
        try {
            // Avoid recomputing the URL list for a config we have seen before.
            List<String> urls;
            if (providerUrls.containsKey(config)) {
                urls = providerUrls.get(config);
            } else {
                urls = ZookeeperRegistryHelper.convertProviderToUrls(config);
                providerUrls.put(config, urls);
            }
            if (CommonUtils.isNotEmpty(urls)) {
                String providerPath = buildProviderPath(rootPath, config);
                if (LOGGER.isInfoEnabled(appName)) {
                    LOGGER.infoWithApp(appName, LogCodes.getLog(LogCodes.INFO_ROUTE_REGISTRY_PUB_START, providerPath));
                }
                for (String url : urls) {
                    // URLs are URL-encoded so they are safe as ZK node names.
                    url = URLEncoder.encode(url, "UTF-8");
                    String providerUrl = providerPath + CONTEXT_SEP + url;
                    try {
                        getAndCheckZkClient().create().creatingParentContainersIfNeeded()
                            .withMode(ephemeralNode ? CreateMode.EPHEMERAL : CreateMode.PERSISTENT) // ephemeral vs persistent node
                            .forPath(providerUrl, config.isDynamic() ? PROVIDER_ONLINE : PROVIDER_OFFLINE); // default online/offline state
                        if (LOGGER.isInfoEnabled(appName)) {
                            LOGGER.infoWithApp(appName, LogCodes.getLog(LogCodes.INFO_ROUTE_REGISTRY_PUB, providerUrl));
                        }
                    } catch (KeeperException.NodeExistsException nodeExistsException) {
                        // The node may already exist (e.g. re-registration); log and continue.
                        if (LOGGER.isWarnEnabled(appName)) {
                            LOGGER.warnWithApp(appName, "provider has exists in zookeeper, provider=" + providerUrl);
                        }
                    }
                }
                if (LOGGER.isInfoEnabled(appName)) {
                    LOGGER.infoWithApp(appName, LogCodes.getLog(LogCodes.INFO_ROUTE_REGISTRY_PUB_OVER, providerPath));
                }
            }
        } catch (Exception e) {
            throw new SofaRpcRuntimeException("Failed to register provider to zookeeperRegistry!", e);
        }
        if (EventBus.isEnable(ProviderPubEvent.class)) {
            ProviderPubEvent event = new ProviderPubEvent(config);
            EventBus.post(event);
        }
    }
}
public class TargetEncoder {
    /**
     * Core method for applying pre-calculated encodings to the dataset. There are multiple overloaded methods that we will
     * probably be able to get rid off if we are not going to expose Java API for TE.
     * We can just stick to one signature that will suit internal representations of the AutoML's pipeline.
     *
     * @param data dataset that will be used as a base for creation of encodings.
     * @param targetColumnName name of the column with respect to which we were computing encodings.
     * @param columnToEncodingMap map of the prepared encodings with the keys being the names of the columns.
     * @param dataLeakageHandlingStrategy see TargetEncoding.DataLeakageHandlingStrategy // TODO use common interface for stronger type safety.
     * @param foldColumnName column's name that contains fold number the row is belong to.
     * @param withBlendedAvg whether to apply blending or not.
     * @param noiseLevel amount of noise to add to the final encodings.
     * @param imputeNAsWithNewCategory set to `true` to impute NAs with new category.
     * @param seed we might want to specify particular values for reproducibility in tests.
     * @return copy of the `data` frame with encodings
     */
    public Frame applyTargetEncoding(Frame data, String targetColumnName, Map<String, Frame> columnToEncodingMap, byte dataLeakageHandlingStrategy, String foldColumnName, boolean withBlendedAvg, double noiseLevel, boolean imputeNAsWithNewCategory, long seed) {
        if (noiseLevel < 0)
            throw new IllegalStateException("`_noiseLevel` must be non-negative");
        Frame dataWithAllEncodings = null;
        try {
            // Work on a deep copy so the caller's frame is never mutated.
            dataWithAllEncodings = data.deepCopy(Key.make().toString());
            DKV.put(dataWithAllEncodings);
            ensureTargetColumnIsBinaryCategorical(dataWithAllEncodings, targetColumnName);
            // Encode each configured column in turn; each pass replaces
            // dataWithAllEncodings with a frame that has one more encoded column.
            for (String teColumnName : _columnNamesToEncode) {
                imputeNAsForColumn(dataWithAllEncodings, teColumnName, teColumnName + "_NA");
                String newEncodedColumnName = teColumnName + "_te";
                Frame encodingMapForCurrentTEColumn = columnToEncodingMap.get(teColumnName);
                double priorMeanFromTrainingDataset = calculatePriorMean(encodingMapForCurrentTEColumn);
                int teColumnIndex = dataWithAllEncodings.find(teColumnName);
                switch (dataLeakageHandlingStrategy) {
                    case DataLeakageHandlingStrategy.KFold:
                        Frame holdoutEncodeMap = null;
                        Frame dataWithMergedAggregationsK = null;
                        try {
                            if (foldColumnName == null)
                                throw new IllegalStateException("`foldColumn` must be provided for dataLeakageHandlingStrategy = KFold");
                            int teColumnIndexInEncodingMap = encodingMapForCurrentTEColumn.find(teColumnName);
                            int foldColumnIndex = dataWithAllEncodings.find(foldColumnName);
                            long[] foldValues = getUniqueValuesOfTheFoldColumn(encodingMapForCurrentTEColumn, 1);
                            Scope.enter();
                            // Following part is actually a preparation phase for KFold case. Maybe we should move it to prepareEncodingMap method.
                            try {
                                // Build, per fold, the out-of-fold aggregation and rBind the
                                // pieces into one holdout encoding map.
                                for (long foldValue : foldValues) {
                                    Frame outOfFoldData = getOutOfFoldData(encodingMapForCurrentTEColumn, foldColumnName, foldValue);
                                    Frame groupedByTEColumnAndAggregate = groupByTEColumnAndAggregate(outOfFoldData, teColumnIndexInEncodingMap);
                                    renameColumn(groupedByTEColumnAndAggregate, "sum_numerator", "numerator");
                                    renameColumn(groupedByTEColumnAndAggregate, "sum_denominator", "denominator");
                                    Frame groupedWithAppendedFoldColumn = addCon(groupedByTEColumnAndAggregate, "foldValueForMerge", foldValue);
                                    if (holdoutEncodeMap == null) {
                                        holdoutEncodeMap = groupedWithAppendedFoldColumn;
                                    } else {
                                        Frame newHoldoutEncodeMap = rBind(holdoutEncodeMap, groupedWithAppendedFoldColumn);
                                        holdoutEncodeMap.delete();
                                        holdoutEncodeMap = newHoldoutEncodeMap;
                                    }
                                    outOfFoldData.delete();
                                    Scope.track(groupedWithAppendedFoldColumn);
                                }
                            } finally {
                                Scope.exit();
                            }
                            // End of the preparation phase
                            dataWithMergedAggregationsK = mergeByTEAndFoldColumns(dataWithAllEncodings, holdoutEncodeMap, teColumnIndex, foldColumnIndex, teColumnIndexInEncodingMap);
                            Frame withEncodingsFrameK = calculateEncoding(dataWithMergedAggregationsK, encodingMapForCurrentTEColumn, targetColumnName, newEncodedColumnName, withBlendedAvg);
                            Frame withAddedNoiseEncodingsFrameK = applyNoise(withEncodingsFrameK, newEncodedColumnName, noiseLevel, seed);
                            // Cases when we can introduce NA's:
                            // 1) if column is represented only in one fold then during computation of out-of-fold subsets we will get empty aggregations.
                            //    When merging with the original dataset we will get NA's on the right side.
                            // Note: since we create encoding based on training dataset and use KFold mainly when we apply encoding to the training set,
                            // there is zero probability that we haven't seen some category.
                            Frame imputedEncodingsFrameK = imputeWithMean(withAddedNoiseEncodingsFrameK, withAddedNoiseEncodingsFrameK.find(newEncodedColumnName), priorMeanFromTrainingDataset);
                            removeNumeratorAndDenominatorColumns(imputedEncodingsFrameK);
                            // Swap in the newly-encoded frame; the previous one is deleted.
                            dataWithAllEncodings.delete();
                            dataWithAllEncodings = imputedEncodingsFrameK;
                        } catch (Exception ex) {
                            if (dataWithMergedAggregationsK != null) dataWithMergedAggregationsK.delete();
                            throw ex;
                        } finally {
                            if (holdoutEncodeMap != null) holdoutEncodeMap.delete();
                        }
                        break;
                    case DataLeakageHandlingStrategy.LeaveOneOut:
                        Frame groupedTargetEncodingMap = null;
                        Frame dataWithMergedAggregationsL = null;
                        try {
                            foldColumnIsInEncodingMapCheck(foldColumnName, encodingMapForCurrentTEColumn);
                            groupedTargetEncodingMap = groupingIgnoringFordColumn(foldColumnName, encodingMapForCurrentTEColumn, teColumnName);
                            int teColumnIndexInGroupedEncodingMap = groupedTargetEncodingMap.find(teColumnName);
                            dataWithMergedAggregationsL = mergeByTEColumn(dataWithAllEncodings, groupedTargetEncodingMap, teColumnIndex, teColumnIndexInGroupedEncodingMap);
                            // Subtract each row's own target so the row does not leak into its encoding.
                            Frame subtractedFrameL = subtractTargetValueForLOO(dataWithMergedAggregationsL, targetColumnName);
                            Frame withEncodingsFrameL = calculateEncoding(subtractedFrameL, groupedTargetEncodingMap, targetColumnName, newEncodedColumnName, withBlendedAvg); // do we really need to pass groupedTargetEncodingMap again?
                            Frame withAddedNoiseEncodingsFrameL = applyNoise(withEncodingsFrameL, newEncodedColumnName, noiseLevel, seed);
                            // Cases when we can introduce NA's:
                            // 1) Only in case when our encoding map has not seen some category. // TODO move second parameter into the function
                            Frame imputedEncodingsFrameL = imputeWithMean(withAddedNoiseEncodingsFrameL, withAddedNoiseEncodingsFrameL.find(newEncodedColumnName), priorMeanFromTrainingDataset);
                            removeNumeratorAndDenominatorColumns(imputedEncodingsFrameL);
                            dataWithAllEncodings.delete();
                            dataWithAllEncodings = imputedEncodingsFrameL;
                        } catch (Exception ex) {
                            if (dataWithMergedAggregationsL != null) dataWithMergedAggregationsL.delete();
                            throw ex;
                        } finally {
                            if (groupedTargetEncodingMap != null) groupedTargetEncodingMap.delete();
                        }
                        break;
                    case DataLeakageHandlingStrategy.None:
                        Frame groupedTargetEncodingMapForNone = null;
                        Frame dataWithMergedAggregationsN = null;
                        try {
                            foldColumnIsInEncodingMapCheck(foldColumnName, encodingMapForCurrentTEColumn);
                            groupedTargetEncodingMapForNone = groupingIgnoringFordColumn(foldColumnName, encodingMapForCurrentTEColumn, teColumnName);
                            int teColumnIndexInGroupedEncodingMapNone = groupedTargetEncodingMapForNone.find(teColumnName);
                            dataWithMergedAggregationsN = mergeByTEColumn(dataWithAllEncodings, groupedTargetEncodingMapForNone, teColumnIndex, teColumnIndexInGroupedEncodingMapNone);
                            Frame withEncodingsFrameN = calculateEncoding(dataWithMergedAggregationsN, groupedTargetEncodingMapForNone, targetColumnName, newEncodedColumnName, withBlendedAvg);
                            Frame withAddedNoiseEncodingsFrameN = applyNoise(withEncodingsFrameN, newEncodedColumnName, noiseLevel, seed);
                            // In cases when encoding has not seen some levels we will impute NAs with mean computed from training set. Mean is a dataleakage btw.
                            // Note: In case of creating encoding map based on the holdout set we'd better use stratified sampling.
                            // Maybe even choose size of holdout taking into account size of the minimal set that represents all levels.
                            // Otherwise there are higher chances to get NA's for unseen categories.
                            Frame imputedEncodingsFrameN = imputeWithMean(withAddedNoiseEncodingsFrameN, withAddedNoiseEncodingsFrameN.find(newEncodedColumnName), priorMeanFromTrainingDataset);
                            removeNumeratorAndDenominatorColumns(imputedEncodingsFrameN);
                            dataWithAllEncodings.delete();
                            dataWithAllEncodings = imputedEncodingsFrameN;
                        } catch (Exception ex) {
                            if (dataWithMergedAggregationsN != null) dataWithMergedAggregationsN.delete();
                            throw ex;
                        } finally {
                            if (groupedTargetEncodingMapForNone != null) groupedTargetEncodingMapForNone.delete();
                        }
                }
            }
            return dataWithAllEncodings;
        } catch (Exception ex) {
            // On any failure, release the working copy before rethrowing.
            if (dataWithAllEncodings != null) dataWithAllEncodings.delete();
            throw ex;
        }
    }
}
public class CalendarParserImpl { /** * Reads the next token from the tokeniser . * This method throws a ParseException when reading EOF . * @ param tokeniser * @ param in * @ param ignoreEOF * @ return int value of the ttype field of the tokeniser * @ throws ParseException When reading EOF . */ private int nextToken ( StreamTokenizer tokeniser , Reader in , boolean ignoreEOF ) throws IOException , ParserException { } }
int token = tokeniser . nextToken ( ) ; if ( ! ignoreEOF && token == StreamTokenizer . TT_EOF ) { throw new ParserException ( "Unexpected end of file" , getLineNumber ( tokeniser , in ) ) ; } return token ;
public class FileBasedNamespaceMappings { /** * Returns a prefix for the namespace < code > uri < / code > . If a namespace * mapping exists , the already known prefix is returned ; otherwise a new * prefix is created and assigned to the namespace uri . * @ param uri the namespace uri . * @ return the prefix for the namespace uri . * @ throws NamespaceException if an yet unknown namespace uri / prefix * mapping could not be stored . */ public synchronized String getNamespacePrefixByURI ( String uri ) throws NamespaceException { } }
String prefix = uriToPrefix . get ( uri ) ; if ( prefix == null ) { // make sure prefix is not taken while ( prefixToURI . get ( String . valueOf ( prefixCount ) ) != null ) { prefixCount ++ ; } prefix = String . valueOf ( prefixCount ) ; prefixToURI . put ( prefix , uri ) ; uriToPrefix . put ( uri , prefix ) ; log . debug ( "adding new namespace mapping: " + prefix + " -> " + uri ) ; try { store ( ) ; } catch ( IOException e ) { throw new NamespaceException ( "Could not obtain a prefix for uri: " + uri , e ) ; } } return prefix ;
public class OutputsInner { /** * Creates an output or replaces an already existing output under an existing streaming job . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param jobName The name of the streaming job . * @ param outputName The name of the output . * @ param output The definition of the output that will be used to create a new output or replace the existing one under the streaming job . * @ param ifMatch The ETag of the output . Omit this value to always overwrite the current output . Specify the last - seen ETag value to prevent accidentally overwritting concurrent changes . * @ param ifNoneMatch Set to ' * ' to allow a new output to be created , but to prevent updating an existing output . Other values will result in a 412 Pre - condition Failed response . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the OutputInner object */ public Observable < OutputInner > createOrReplaceAsync ( String resourceGroupName , String jobName , String outputName , OutputInner output , String ifMatch , String ifNoneMatch ) { } }
return createOrReplaceWithServiceResponseAsync ( resourceGroupName , jobName , outputName , output , ifMatch , ifNoneMatch ) . map ( new Func1 < ServiceResponseWithHeaders < OutputInner , OutputsCreateOrReplaceHeaders > , OutputInner > ( ) { @ Override public OutputInner call ( ServiceResponseWithHeaders < OutputInner , OutputsCreateOrReplaceHeaders > response ) { return response . body ( ) ; } } ) ;
public class AnalyzeLocal { /** * Get a list of unique values from the specified columns of a sequence * @ param columnNames Name of the columns to get unique values from * @ param schema Data schema * @ param sequenceData Sequence data to get unique values from * @ return */ public static Map < String , Set < Writable > > getUniqueSequence ( List < String > columnNames , Schema schema , SequenceRecordReader sequenceData ) { } }
Map < String , Set < Writable > > m = new HashMap < > ( ) ; for ( String s : columnNames ) { m . put ( s , new HashSet < > ( ) ) ; } while ( sequenceData . hasNext ( ) ) { List < List < Writable > > next = sequenceData . sequenceRecord ( ) ; for ( List < Writable > step : next ) { for ( String s : columnNames ) { int idx = schema . getIndexOfColumn ( s ) ; m . get ( s ) . add ( step . get ( idx ) ) ; } } } return m ;
public class StructureImpl {
    /**
     * {@inheritDoc}
     * Looks up a group by its PDB residue number within the chain with the
     * given author id, searching polymer, non-polymer and water chains.
     */
    @Override
    public Group findGroup(String chainName, String pdbResnum, int modelnr) throws StructureException {
        // if structure is xray there will be only one "model".
        // NOTE(review): the bound check uses '>' — presumably modelnr is
        // treated as compatible with models.size() somewhere upstream;
        // confirm whether it should be '>=' for a 0-based model index.
        if (modelnr > models.size())
            throw new StructureException(" no model nr " + modelnr + " in this structure. (contains " + models.size() + ")");
        // first we need to gather all groups with the author id chainName: polymers, non-polymers and waters
        Chain polyChain = getPolyChainByPDB(chainName, modelnr);
        if (polyChain != null) {
            List<Group> groups = new ArrayList<>();
            groups.addAll(polyChain.getAtomGroups());
            // there can be more than one non-poly chain for a given author id
            for (Chain chain : getNonPolyChainsByPDB(chainName, modelnr)) {
                groups.addAll(chain.getAtomGroups());
            }
            Chain water = getWaterChainByPDB(chainName, modelnr);
            if (water != null)
                groups.addAll(water.getAtomGroups());
            // now iterate over all groups
            // in order to find the amino acid that has this pdbResnum.
            for (Group g : groups) {
                // Compare the string form of the residue number (which may
                // include an insertion code) against the requested one.
                String rnum = g.getResidueNumber().toString();
                // System.out.println(g + " > " + rnum + " < > " + pdbResnum + " < ");
                // we only mutate amino acids
                // and ignore hetatoms and nucleotides in this case
                if (rnum.equals(pdbResnum)) {
                    return g;
                }
            }
        }
        throw new StructureException("could not find group " + pdbResnum + " in chain " + chainName);
    }
}
public class HamcrestMatchers { /** * Creates a matcher for { @ linkplain Iterable } s that matches when a single pass over the examined * { @ linkplain Iterable } yields a series of items , that contains items logically equal to the corresponding item in * the specified items , in the same relative order For example : < br / > * < p > For example : * < pre > * / / Arrange * Iterable < String > actual = Arrays . asList ( " a " , " b " , " c " , " d " ) ; * Iterable < String > expected = Arrays . asList ( " a " , " c " ) ; * / / Assert * assertThat ( actual , containsInRelativeOrder ( expected ) ) ; * < / pre > * @ param items the items that must be contained within items provided by an examined { @ linkplain Iterable } in the * same relative order */ public static < T > Matcher < Iterable < ? extends T > > containsInRelativeOrder ( final Iterable < T > items ) { } }
return IsIterableContainingInRelativeOrder . containsInRelativeOrder ( items ) ;
public class TiffReader { /** * Check tag type . * @ param tagid the tagid * @ param tagType the tag type * @ param n the n */ private boolean checkType ( int tagid , int tagType , int n ) { } }
if ( TiffTags . hasTag ( tagid ) && ! TiffTags . getTag ( tagid ) . getName ( ) . equals ( "IPTC" ) ) { boolean found = false ; String stagType = TiffTags . getTagTypeName ( tagType ) ; if ( stagType != null ) { if ( stagType . equals ( "SUBIFD" ) ) stagType = "IFD" ; if ( stagType . equals ( "UNDEFINED" ) ) stagType = "BYTE" ; for ( String vType : TiffTags . getTag ( tagid ) . getType ( ) ) { String vType2 = vType ; if ( vType2 . equals ( "UNDEFINED" ) ) vType2 = "BYTE" ; if ( vType2 . equals ( stagType ) ) { found = true ; } } } if ( ! found ) { validation . addError ( "Incorrect type for tag " + TiffTags . getTag ( tagid ) . getName ( ) , "IFD" + n , stagType ) ; return false ; } return true ; } return false ;
public class HadoopDFSRule { /** * Copies the content of the given resource into Hadoop . * @ param filename File to be created * @ param resource Resource to copy * @ throws IOException Anything */ public void copyResource ( String filename , String resource ) throws IOException { } }
write ( filename , IOUtils . toByteArray ( getClass ( ) . getResource ( resource ) ) ) ;
public class BaseHybridHashTable {
    /**
     * The level parameter is needed so that we can have different hash functions when we
     * recursively apply the partitioning, so that the working set eventually fits into memory.
     *
     * @param hashCode the raw hash code to scramble
     * @param level    the recursion level (0 on the first pass)
     * @return a non-negative, level-dependent hash value
     */
    public static int hash(int hashCode, int level) {
        // Rotate by 11 bits per level so each recursion depth sees a different hash.
        final int rotated = Integer.rotateLeft(hashCode, level * 11);
        // Map to non-negative: for negative v, v ^ (v >> 31) == ~v == -(v + 1),
        // which is exactly the original's -(code + 1) branch.
        return rotated ^ (rotated >> 31);
    }
}
public class AbstractHibernateCriteriaBuilder { /** * Groovy moves the map to the first parameter if using the idiomatic form , e . g . * < code > eq ' firstName ' , ' Fred ' , ignoreCase : true < / code > . * @ param params optional map with customization parameters ; currently only ' ignoreCase ' is supported . * @ param propertyName * @ param propertyValue * @ return A Criterion instance */ @ SuppressWarnings ( "rawtypes" ) public org . grails . datastore . mapping . query . api . Criteria eq ( Map params , String propertyName , Object propertyValue ) { } }
return eq ( propertyName , propertyValue , params ) ;
public class DaoService { /** * / * ( non - Javadoc ) * @ see org . esupportail . smsuapi . dao . DaoService # deleteSmsOlderThan ( java . util . Date ) */ public int deleteSmsOlderThan ( final Date date ) { } }
final String hql = "delete from Sms as sms where sms.Date < :date" ; final Query query = getCurrentSession ( ) . createQuery ( hql ) ; query . setTimestamp ( "date" , date ) ; final int nbSmsDeleted = query . executeUpdate ( ) ; return nbSmsDeleted ;
public class JCudnn {
    /**
     * <pre>
     * Derives a tensor descriptor from layer data descriptor for BatchNormalization
     * scale, invVariance, bnBias, bnScale tensors. Use this tensor desc for
     * bnScaleBiasMeanVarDesc and bnScaleBiasDiffDesc in Batch Normalization forward and backward functions.
     * </pre>
     */
    public static int cudnnDeriveBNTensorDescriptor(cudnnTensorDescriptor derivedBnDesc, cudnnTensorDescriptor xDesc, int mode) {
        // Thin JNI wrapper: forwards to the native cuDNN call; checkResult
        // presumably inspects the returned status code (see JCudnn.checkResult)
        // before the status is returned to the caller.
        return checkResult(cudnnDeriveBNTensorDescriptorNative(derivedBnDesc, xDesc, mode));
    }
}
public class RulesPlugin { /** * Figure out which classes extend the named class and return the current class and the extenders * @ param className * @ param engineMetadata * @ return list of class references */ private List < ClassReference > getClassReferences ( String className ) { } }
List < ClassReference > ret = new ArrayList < ClassReference > ( ) ; ClassReference cr = m_classReferenceMap . get ( className ) ; ret . addAll ( cr . getChildren ( ) ) ; return ret ;
public class JmsSyncProducer { /** * Retrieve the reply destination either by injected instance , destination name or * by creating a new temporary destination . * @ param session current JMS session * @ param message holding possible reply destination in header . * @ return the reply destination . * @ throws JMSException */ private Destination getReplyDestination ( Session session , Message message ) throws JMSException { } }
if ( message . getHeader ( org . springframework . messaging . MessageHeaders . REPLY_CHANNEL ) != null ) { if ( message . getHeader ( org . springframework . messaging . MessageHeaders . REPLY_CHANNEL ) instanceof Destination ) { return ( Destination ) message . getHeader ( org . springframework . messaging . MessageHeaders . REPLY_CHANNEL ) ; } else { return resolveDestinationName ( message . getHeader ( org . springframework . messaging . MessageHeaders . REPLY_CHANNEL ) . toString ( ) , session ) ; } } else if ( endpointConfiguration . getReplyDestination ( ) != null ) { return endpointConfiguration . getReplyDestination ( ) ; } else if ( StringUtils . hasText ( endpointConfiguration . getReplyDestinationName ( ) ) ) { return resolveDestinationName ( endpointConfiguration . getReplyDestinationName ( ) , session ) ; } if ( endpointConfiguration . isPubSubDomain ( ) && session instanceof TopicSession ) { return session . createTemporaryTopic ( ) ; } else { return session . createTemporaryQueue ( ) ; }