signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class Config { /** * Get configuration object property value or null if there is no property with requested name .
* @ param name property name .
* @ return configuration object property value or null .
* @ throws IllegalArgumentException if < code > name < / code > argument is null or empty . */
public String getProperty ( String name ) { } }
|
Params . notNullOrEmpty ( name , "Property name" ) ; usedProperties . add ( name ) ; return properties . getProperty ( name ) ;
|
public class ChoiceFormat { /** * Set the choices to be used in formatting .
* @ param limits contains the top value that you want
* parsed with that format , and should be in ascending sorted order . When
* formatting X , the choice will be the i , where
* limit [ i ] & lt ; = X & lt ; limit [ i + 1 ] .
* If the limit array is not in ascending order , the results of formatting
* will be incorrect .
* @ param formats are the formats you want to use for each limit .
* They can be either Format objects or Strings .
* When formatting with object Y ,
* if the object is a NumberFormat , then ( ( NumberFormat ) Y ) . format ( X )
* is called . Otherwise Y . toString ( ) is called . */
public void setChoices ( double [ ] limits , String formats [ ] ) { } }
|
if ( limits . length != formats . length ) { throw new IllegalArgumentException ( "Array and limit arrays must be of the same length." ) ; } choiceLimits = limits ; choiceFormats = formats ;
|
public class RocksDbWrapper { /** * Gets a value from a column family .
* @ param cfName
* @ param key
* @ return
* @ throws RocksDbException */
public byte [ ] get ( String cfName , String key ) throws RocksDbException { } }
|
return get ( cfName , readOptions , key ) ;
|
public class PrefixedPropertyOverrideConfigurer { /** * Gets the effective properties .
* @ return the effective properties */
@ ManagedAttribute ( ) public List < String > getEffectiveProperties ( ) { } }
|
final List < String > properties = new LinkedList < String > ( ) ; for ( final String key : myProperties . stringPropertyNames ( ) ) { properties . add ( key + "=" + myProperties . get ( key ) ) ; } return properties ;
|
public class FullSegmentation { /** * 获取文本的所有可能切分结果
* @ param text 文本
* @ return 全切分结果 */
private List < Word > [ ] fullSeg ( String text ) { } }
|
// 文本长度
final int textLen = text . length ( ) ; // 以每一个字作为词的开始 , 所能切分的词
final List < String > [ ] sequence = new LinkedList [ textLen ] ; if ( isParallelSeg ( ) ) { // 并行化
List < Integer > list = new ArrayList < > ( textLen ) ; for ( int i = 0 ; i < textLen ; i ++ ) { list . add ( i ) ; } list . parallelStream ( ) . forEach ( i -> sequence [ i ] = fullSeg ( text , i ) ) ; } else { // 串行化
for ( int i = 0 ; i < textLen ; i ++ ) { sequence [ i ] = fullSeg ( text , i ) ; } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "全切分中间结果:" ) ; int i = 1 ; for ( List < String > list : sequence ) { LOGGER . debug ( "\t" + ( i ++ ) + "、" + list ) ; } } // 树叶
List < Node > leaf = new LinkedList < > ( ) ; for ( String word : sequence [ 0 ] ) { // 树根
Node node = new Node ( word ) ; // 把全切分中间结果 ( 二维数组 ) 转换为合理切分
buildNode ( node , sequence , word . length ( ) , leaf ) ; } // 清理无用数据
for ( int j = 0 ; j < sequence . length ; j ++ ) { sequence [ j ] . clear ( ) ; sequence [ j ] = null ; } // 从所有树叶开始反向遍历出全切分结果
List < Word > [ ] res = toWords ( leaf ) ; leaf . clear ( ) ; return res ;
|
public class Tokens { /** * Returns { @ code true } if the two tokens are adjacent in the input stream with no intervening
* characters . */
static boolean areAdjacent ( Token first , Token second ) { } }
|
return first . endLine == second . beginLine && first . endColumn == second . beginColumn - 1 ;
|
public class DeploymentScenario { /** * Validate that a deployment of same type is not already added */
private void validateNotSameNameAndTypeOfDeployment ( DeploymentDescription deployment ) { } }
|
for ( Deployment existing : deployments ) { if ( existing . getDescription ( ) . getName ( ) . equals ( deployment . getName ( ) ) ) { if ( ( existing . getDescription ( ) . isArchiveDeployment ( ) && deployment . isArchiveDeployment ( ) ) || ( existing . getDescription ( ) . isDescriptorDeployment ( ) && deployment . isDescriptorDeployment ( ) ) ) { throw new IllegalArgumentException ( "Can not add multiple " + Archive . class . getName ( ) + " deployments with the same name: " + deployment . getName ( ) ) ; } } }
|
public class BeanDefinitionParser { /** * parseCollectionElements .
* @ param elementNodes a { @ link org . w3c . dom . NodeList } object .
* @ param target a { @ link java . util . Collection } object .
* @ param bd a { @ link org . springframework . beans . factory . config . BeanDefinition } object .
* @ param defaultElementType a { @ link java . lang . String } object . */
protected void parseCollectionElements ( NodeList elementNodes , Collection < Object > target , BeanDefinition bd , String defaultElementType ) { } }
|
for ( int i = 0 ; i < elementNodes . getLength ( ) ; i ++ ) { Node node = elementNodes . item ( i ) ; if ( node instanceof Element && ! nodeNameEquals ( node , DESCRIPTION_ELEMENT ) ) target . add ( parsePropertySubElement ( ( Element ) node , bd , defaultElementType ) ) ; }
|
public class Reflection { /** * Utility method that allows to extract actual annotation from field , bypassing LibGDX annotation wrapper . Returns
* null if annotation is not present .
* @ param field might be annotated .
* @ param annotationType class of the annotation .
* @ return an instance of the annotation if the field is annotated or null if not .
* @ param < Type > type of annotation . */
public static < Type extends Annotation > Type getAnnotation ( final Field field , final Class < Type > annotationType ) { } }
|
if ( isAnnotationPresent ( field , annotationType ) ) { return field . getDeclaredAnnotation ( annotationType ) . getAnnotation ( annotationType ) ; } return null ;
|
public class AbstractQueueJsonDeserializer { /** * < p > newInstance < / p >
* @ param deserializer { @ link JsonDeserializer } used to deserialize the objects inside the { @ link AbstractQueue } .
* @ param < T > Type of the elements inside the { @ link AbstractQueue }
* @ return a new instance of { @ link AbstractQueueJsonDeserializer } */
public static < T > AbstractQueueJsonDeserializer < T > newInstance ( JsonDeserializer < T > deserializer ) { } }
|
return new AbstractQueueJsonDeserializer < T > ( deserializer ) ;
|
public class LoadAllOperation { /** * Filters the { @ link # keys } list for keys matching the partition on
* which this operation is executed .
* @ return the filtered key list */
private List < Data > selectThisPartitionsKeys ( ) { } }
|
final IPartitionService partitionService = mapServiceContext . getNodeEngine ( ) . getPartitionService ( ) ; final int partitionId = getPartitionId ( ) ; List < Data > dataKeys = null ; for ( Data key : keys ) { if ( partitionId == partitionService . getPartitionId ( key ) ) { if ( dataKeys == null ) { dataKeys = new ArrayList < > ( keys . size ( ) ) ; } dataKeys . add ( key ) ; } } if ( dataKeys == null ) { return Collections . emptyList ( ) ; } return dataKeys ;
|
public class JmolSymmetryScriptGeneratorH { /** * Returns a Jmol script to set the default orientation for a structure
* @ return Jmol script */
@ Override public String getDefaultOrientation ( ) { } }
|
StringBuilder s = new StringBuilder ( ) ; s . append ( setCentroid ( ) ) ; Quat4d q = new Quat4d ( ) ; q . set ( helixAxisAligner . getRotationMatrix ( ) ) ; // set orientation
s . append ( "moveto 0 quaternion{" ) ; s . append ( jMolFloat ( q . x ) ) ; s . append ( "," ) ; s . append ( jMolFloat ( q . y ) ) ; s . append ( "," ) ; s . append ( jMolFloat ( q . z ) ) ; s . append ( "," ) ; s . append ( jMolFloat ( q . w ) ) ; s . append ( "};" ) ; return s . toString ( ) ;
|
public class SibRaDestinationSession { /** * Checks that the parent connection has not been invalidated by the
* connection manager .
* @ throws SISessionUnavailableException
* if the connection has been invalidated */
protected void checkValid ( ) throws SISessionUnavailableException { } }
|
if ( ! _parentConnection . isValid ( ) ) { final SISessionUnavailableException exception = new SISessionUnavailableException ( NLS . getString ( "INVALID_SESSION_CWSIV0200" ) ) ; if ( TRACE . isEventEnabled ( ) ) { SibTr . exception ( this , TRACE , exception ) ; } throw exception ; }
|
public class Swarm { /** * Retrieve the default ShrinkWrap deployment .
* @ return The default deployment , unmodified . */
public Archive < ? > createDefaultDeployment ( ) throws Exception { } }
|
if ( this . server == null ) { throw SwarmMessages . MESSAGES . containerNotStarted ( "createDefaultDeployment()" ) ; } return this . server . deployer ( ) . createDefaultDeployment ( ) ;
|
public class Gen { /** * Derived visitor method : generate code for a list of statements . */
public void genStats ( List < ? extends JCTree > trees , Env < GenContext > env ) { } }
|
for ( List < ? extends JCTree > l = trees ; l . nonEmpty ( ) ; l = l . tail ) genStat ( l . head , env , CRT_STATEMENT ) ;
|
public class AbstractClasspathScanner { /** * This method gets the singleton instance of this { @ link ClasspathScanner } . < br >
* < b > ATTENTION : < / b > < br >
* Please prefer dependency - injection instead of using this method .
* @ return the singleton instance . */
public static ClasspathScanner getInstance ( ) { } }
|
if ( instance == null ) { synchronized ( AbstractClasspathScanner . class ) { if ( instance == null ) { ClasspathScannerImpl impl = new ClasspathScannerImpl ( ) ; impl . initialize ( ) ; } } } return instance ;
|
public class Bits { /** * available via default BitStore method */
static Positions newDisjointPositions ( Matches matches ) { } }
|
if ( matches == null ) throw new IllegalArgumentException ( "null matches" ) ; return new BitStorePositions ( matches , true , 0 ) ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcBooleanResult ( ) { } }
|
if ( ifcBooleanResultEClass == null ) { ifcBooleanResultEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 46 ) ; } return ifcBooleanResultEClass ;
|
public class CmsDriverManager { /** * Unlocks a resource . < p >
* @ param dbc the current database context
* @ param resource the resource to unlock
* @ param force < code > true < / code > , if a resource is forced to get unlocked , no matter by which user and in which project the resource is currently locked
* @ param removeSystemLock < code > true < / code > , if you also want to remove system locks
* @ throws CmsException if something goes wrong
* @ see CmsObject # unlockResource ( String )
* @ see I _ CmsResourceType # unlockResource ( CmsObject , CmsSecurityManager , CmsResource ) */
public void unlockResource ( CmsDbContext dbc , CmsResource resource , boolean force , boolean removeSystemLock ) throws CmsException { } }
|
// update the resource cache
m_monitor . clearResourceCache ( ) ; // now update lock status
m_lockManager . removeResource ( dbc , resource , force , removeSystemLock ) ; // we must also clear the permission cache
m_monitor . flushCache ( CmsMemoryMonitor . CacheType . PERMISSION ) ; // fire resource modification event
Map < String , Object > data = new HashMap < String , Object > ( 2 ) ; data . put ( I_CmsEventListener . KEY_RESOURCE , resource ) ; data . put ( I_CmsEventListener . KEY_CHANGE , new Integer ( NOTHING_CHANGED ) ) ; OpenCms . fireCmsEvent ( new CmsEvent ( I_CmsEventListener . EVENT_RESOURCE_MODIFIED , data ) ) ;
|
public class ZookeeperPublishHandler { /** * 发布服务
* @ param descs
* 所有的服务描述
* @ return 发布成功与否
* @ see com . baidu . beidou . navi . server . locator . PublishHandler # publish ( java . util . Collection ) */
@ Override public < KEY > boolean publish ( Collection < MethodDescriptor < KEY > > descs ) { } }
|
if ( CollectionUtil . isEmpty ( descs ) ) { LOG . warn ( "No service to publish" ) ; return false ; } List < String > methodNames = CollectionUtil . transform ( descs , new Function < MethodDescriptor < KEY > , String > ( ) { @ Override public String apply ( MethodDescriptor < KEY > input ) { return String . format ( "%s.%s(..)" , input . getInterfClass ( ) . getSimpleName ( ) , input . getMethod ( ) . getName ( ) ) ; } } ) ; List < String > serviceNames = CollectionUtil . transform ( descs , new Function < MethodDescriptor < KEY > , String > ( ) { @ Override public String apply ( MethodDescriptor < KEY > input ) { return input . getInterfClass ( ) . getSimpleName ( ) ; } } ) ; Set < String > distinctServices = new HashSet < String > ( serviceNames ) ; Collections . sort ( methodNames ) ; Collections . sort ( serviceNames ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Export rpc service methods which may include unneccessary methods: " + Arrays . toString ( methodNames . toArray ( new String [ ] { } ) ) ) ; LOG . debug ( "Export total " + methodNames . size ( ) + " rpc service methods" ) ; } LOG . info ( "Export rpc services: " + Arrays . toString ( distinctServices . toArray ( new String [ ] { } ) ) ) ; LOG . info ( "Export total " + distinctServices . size ( ) + " rpc services" ) ; if ( ! RpcServerConf . ENABLE_ZK_REGISTRY ) { LOG . info ( "Export services soley at localhost NOT register at zookeeper" ) ; return false ; } if ( ! doConnectAndCreateZkPath ( ) ) { LOG . info ( "Export services failed at zookeeper and exit publishing" ) ; return false ; } String localIp = ZkRegisterInfoUtil . getLocalHostIp ( ) ; String localHostname = ZkRegisterInfoUtil . getLocalHostName ( ) ; // override configuration files ' port
RpcServerConf . SERVER_PORT = Integer . valueOf ( ZkRegisterInfoUtil . getLocalHostPort ( ) ) ; List < String > failedRegistryServiceList = CollectionUtil . createArrayList ( ) ; for ( String service : distinctServices ) { String zkPath = ZkPathUtil . buildPath ( NaviCommonConstant . ZOOKEEPER_BASE_PATH , RpcServerConf . ZK_REGISTRY_NAMESPACE , service , localIp + ":" + RpcServerConf . SERVER_PORT ) ; try { if ( zkClient . exists ( zkPath ) != null ) { zkClient . delete ( zkPath ) ; } zkClient . createSessionNodeForRecursive ( zkPath , localHostname . getBytes ( ) ) ; LOG . info ( "Service registers at Zookeeper successfully - " + zkPath ) ; } catch ( NoNodeException e ) { LOG . warn ( "Zookeeper path cannot found - " + zkPath ) ; } catch ( NodeExistsException e ) { LOG . warn ( "Zookeeper path is already exists - " + zkPath ) ; } catch ( Exception e ) { LOG . error ( "Zookeeper create path failed for " + zkPath , e ) ; failedRegistryServiceList . add ( zkPath ) ; } } if ( CollectionUtil . isEmpty ( failedRegistryServiceList ) ) { LOG . info ( "Registry all " + ( distinctServices . size ( ) - failedRegistryServiceList . size ( ) ) + " services to zookeeper successfully" ) ; } else { LOG . info ( "Registry services to zookeeper encounter some problems, the failure registry paths are " + Arrays . toString ( failedRegistryServiceList . toArray ( new String [ ] { } ) ) ) ; } LOG . info ( "Publish for the " + republishCount . incrementAndGet ( ) + " times" ) ; return true ;
|
public class OutputManager { /** * adds output extension to desired outputPlugin
* adds output extension to desired outputPlugin , so that the output - plugin can start and stop the outputExtension
* task as needed . The outputExtension is specific to the output - plugin
* @ param outputExtension the outputExtension to be added
* @ throws IllegalIDException not yet implemented */
public void addOutputExtension ( OutputExtensionModel < ? , ? > outputExtension ) throws IllegalIDException { } }
|
if ( outputExtensions . containsKey ( outputExtension . getPluginId ( ) ) ) { outputExtensions . get ( outputExtension . getPluginId ( ) ) . add ( outputExtension ) ; } else { IdentifiableSet < OutputExtensionModel < ? , ? > > outputExtensionList = new IdentifiableSet < > ( ) ; outputExtensionList . add ( outputExtension ) ; outputExtensions . put ( outputExtension . getPluginId ( ) , outputExtensionList ) ; } IdentificationManager . getInstance ( ) . getIdentification ( outputExtension ) . ifPresent ( id -> outputPlugins . stream ( ) . filter ( outputPlugin -> outputPlugin . getID ( ) . equals ( outputExtension . getPluginId ( ) ) ) . forEach ( outputPlugin -> outputPlugin . outputExtensionAdded ( id ) ) ) ;
|
public class AmazonMachineLearningClient { /** * Creates a < code > DataSource < / code > object . A < code > DataSource < / code > references data that can be used to perform
* < code > CreateMLModel < / code > , < code > CreateEvaluation < / code > , or < code > CreateBatchPrediction < / code > operations .
* < code > CreateDataSourceFromS3 < / code > is an asynchronous operation . In response to
* < code > CreateDataSourceFromS3 < / code > , Amazon Machine Learning ( Amazon ML ) immediately returns and sets the
* < code > DataSource < / code > status to < code > PENDING < / code > . After the < code > DataSource < / code > has been created and is
* ready for use , Amazon ML sets the < code > Status < / code > parameter to < code > COMPLETED < / code > .
* < code > DataSource < / code > in the < code > COMPLETED < / code > or < code > PENDING < / code > state can be used to perform only
* < code > CreateMLModel < / code > , < code > CreateEvaluation < / code > or < code > CreateBatchPrediction < / code > operations .
* If Amazon ML can ' t accept the input source , it sets the < code > Status < / code > parameter to < code > FAILED < / code > and
* includes an error message in the < code > Message < / code > attribute of the < code > GetDataSource < / code > operation
* response .
* The observation data used in a < code > DataSource < / code > should be ready to use ; that is , it should have a
* consistent structure , and missing data values should be kept to a minimum . The observation data must reside in
* one or more . csv files in an Amazon Simple Storage Service ( Amazon S3 ) location , along with a schema that
* describes the data items by name and type . The same schema must be used for all of the data files referenced by
* the < code > DataSource < / code > .
* After the < code > DataSource < / code > has been created , it ' s ready to use in evaluations and batch predictions . If
* you plan to use the < code > DataSource < / code > to train an < code > MLModel < / code > , the < code > DataSource < / code > also
* needs a recipe . A recipe describes how each input variable will be used in training an < code > MLModel < / code > . Will
* the variable be included or excluded from training ? Will the variable be manipulated ; for example , will it be
* combined with another variable or will it be split apart into word combinations ? The recipe provides answers to
* these questions .
* @ param createDataSourceFromS3Request
* @ return Result of the CreateDataSourceFromS3 operation returned by the service .
* @ throws InvalidInputException
* An error on the client occurred . Typically , the cause is an invalid input value .
* @ throws InternalServerException
* An error on the server occurred when trying to process a request .
* @ throws IdempotentParameterMismatchException
* A second request to use or change an object was not allowed . This can result from retrying a request
* using a parameter that was not present in the original request .
* @ sample AmazonMachineLearning . CreateDataSourceFromS3 */
@ Override public CreateDataSourceFromS3Result createDataSourceFromS3 ( CreateDataSourceFromS3Request request ) { } }
|
request = beforeClientExecution ( request ) ; return executeCreateDataSourceFromS3 ( request ) ;
|
public class AbstractPlainDatagramSocketImpl { /** * Return the first address bound to NetworkInterface with given ID .
* In case of niIndex = = 0 or no address return anyLocalAddress */
static InetAddress getNIFirstAddress ( int niIndex ) throws SocketException { } }
|
if ( niIndex > 0 ) { NetworkInterface networkInterface = NetworkInterface . getByIndex ( niIndex ) ; Enumeration < InetAddress > addressesEnum = networkInterface . getInetAddresses ( ) ; if ( addressesEnum . hasMoreElements ( ) ) { return addressesEnum . nextElement ( ) ; } } return InetAddress . anyLocalAddress ( ) ;
|
public class XmppHostnameVerifier { /** * Returns true if the name matches against the template that may contain the wildcard char ' * ' .
* @ param name
* @ param template
* @ return true if < code > name < / code > matches < code > template < / code > . */
private static boolean matchWildCards ( String name , String template ) { } }
|
int wildcardIndex = template . indexOf ( "*" ) ; if ( wildcardIndex == - 1 ) { return name . equals ( template ) ; } boolean isBeginning = true ; String beforeWildcard ; String afterWildcard = template ; while ( wildcardIndex != - 1 ) { beforeWildcard = afterWildcard . substring ( 0 , wildcardIndex ) ; afterWildcard = afterWildcard . substring ( wildcardIndex + 1 ) ; int beforeStartIndex = name . indexOf ( beforeWildcard ) ; if ( ( beforeStartIndex == - 1 ) || ( isBeginning && beforeStartIndex != 0 ) ) { return false ; } isBeginning = false ; name = name . substring ( beforeStartIndex + beforeWildcard . length ( ) ) ; wildcardIndex = afterWildcard . indexOf ( "*" ) ; } return name . endsWith ( afterWildcard ) ;
|
public class SSTableExport { /** * than once from within the same process . */
static void export ( SSTableReader reader , PrintStream outs , String [ ] excludes , CFMetaData metadata ) throws IOException { } }
|
Set < String > excludeSet = new HashSet < String > ( ) ; if ( excludes != null ) excludeSet = new HashSet < String > ( Arrays . asList ( excludes ) ) ; SSTableIdentityIterator row ; ISSTableScanner scanner = reader . getScanner ( ) ; try { outs . println ( "[" ) ; int i = 0 ; // collecting keys to export
while ( scanner . hasNext ( ) ) { row = ( SSTableIdentityIterator ) scanner . next ( ) ; String currentKey = row . getColumnFamily ( ) . metadata ( ) . getKeyValidator ( ) . getString ( row . getKey ( ) . getKey ( ) ) ; if ( excludeSet . contains ( currentKey ) ) continue ; else if ( i != 0 ) outs . println ( "," ) ; serializeRow ( row , row . getKey ( ) , outs ) ; checkStream ( outs ) ; i ++ ; } outs . println ( "\n]" ) ; outs . flush ( ) ; } finally { scanner . close ( ) ; }
|
public class SoapClient { /** * 设置请求方法
* @ param name 方法名及其命名空间
* @ param params 参数
* @ param useMethodPrefix 是否使用方法的命名空间前缀
* @ return this */
public SoapClient setMethod ( Name name , Map < String , Object > params , boolean useMethodPrefix ) { } }
|
return setMethod ( new QName ( name . getURI ( ) , name . getLocalName ( ) , name . getPrefix ( ) ) , params , useMethodPrefix ) ;
|
public class DataUtils { /** * Bubble sort
* @ param targets */
public static void sort ( final List < Artifact > targets ) { } }
|
int n = targets . size ( ) ; while ( n != 0 ) { int newn = 0 ; for ( int i = 1 ; i <= n - 1 ; i ++ ) { if ( targets . get ( i - 1 ) . toString ( ) . compareTo ( targets . get ( i ) . toString ( ) ) > 0 ) { Collections . swap ( targets , i - 1 , i ) ; newn = i ; } } n = newn ; }
|
public class OWLSameIndividualAxiomImpl_CustomFieldSerializer { /** * Serializes the content of the object into the
* { @ link com . google . gwt . user . client . rpc . SerializationStreamWriter } .
* @ param streamWriter the { @ link com . google . gwt . user . client . rpc . SerializationStreamWriter } to write the
* object ' s content to
* @ param instance the object instance to serialize
* @ throws com . google . gwt . user . client . rpc . SerializationException
* if the serialization operation is not
* successful */
@ Override public void serializeInstance ( SerializationStreamWriter streamWriter , OWLSameIndividualAxiomImpl instance ) throws SerializationException { } }
|
serialize ( streamWriter , instance ) ;
|
public class gslbservice_lbmonitor_binding { /** * Use this API to fetch gslbservice _ lbmonitor _ binding resources of given name . */
public static gslbservice_lbmonitor_binding [ ] get ( nitro_service service , String servicename ) throws Exception { } }
|
gslbservice_lbmonitor_binding obj = new gslbservice_lbmonitor_binding ( ) ; obj . set_servicename ( servicename ) ; gslbservice_lbmonitor_binding response [ ] = ( gslbservice_lbmonitor_binding [ ] ) obj . get_resources ( service ) ; return response ;
|
public class BccClient { /** * Listing volumes owned by the authenticated user .
* @ param request The request containing all options for listing volumes owned by the authenticated user .
* @ return The response containing a list of volume owned by the authenticated user . */
public ListVolumesResponse listVolumes ( ListVolumesRequest request ) { } }
|
InternalRequest internalRequest = this . createRequest ( request , HttpMethodName . GET , VOLUME_PREFIX ) ; if ( request . getMarker ( ) != null ) { internalRequest . addParameter ( "marker" , request . getMarker ( ) ) ; } if ( request . getMaxKeys ( ) > 0 ) { internalRequest . addParameter ( "maxKeys" , String . valueOf ( request . getMaxKeys ( ) ) ) ; } if ( ! Strings . isNullOrEmpty ( request . getInstanceId ( ) ) ) { internalRequest . addParameter ( "instanceId" , request . getInstanceId ( ) ) ; } if ( ! Strings . isNullOrEmpty ( request . getZoneName ( ) ) ) { internalRequest . addParameter ( "zoneName" , request . getZoneName ( ) ) ; } return invokeHttpClient ( internalRequest , ListVolumesResponse . class ) ;
|
public class CmsImportVersion5 { /** * Reads all the relations of the resource from the < code > manifest . xml < / code > file
* and adds them to the according resource . < p >
* @ param resource the resource to import the relations for
* @ param parentElement the current element */
protected void importRelations ( CmsResource resource , Element parentElement ) { } }
|
// Get the nodes for the relations
@ SuppressWarnings ( "unchecked" ) List < Node > relationElements = parentElement . selectNodes ( "./" + A_CmsImport . N_RELATIONS + "/" + A_CmsImport . N_RELATION ) ; List < CmsRelation > relations = new ArrayList < CmsRelation > ( ) ; // iterate over the nodes
Iterator < Node > itRelations = relationElements . iterator ( ) ; while ( itRelations . hasNext ( ) ) { Element relationElement = ( Element ) itRelations . next ( ) ; String structureID = getChildElementTextValue ( relationElement , A_CmsImport . N_RELATION_ATTRIBUTE_ID ) ; String targetPath = getChildElementTextValue ( relationElement , A_CmsImport . N_RELATION_ATTRIBUTE_PATH ) ; String relationType = getChildElementTextValue ( relationElement , A_CmsImport . N_RELATION_ATTRIBUTE_TYPE ) ; CmsUUID targetId = new CmsUUID ( structureID ) ; CmsRelationType type = CmsRelationType . valueOf ( relationType ) ; CmsRelation relation = new CmsRelation ( resource . getStructureId ( ) , resource . getRootPath ( ) , targetId , targetPath , type ) ; relations . add ( relation ) ; } if ( ! relations . isEmpty ( ) ) { m_importedRelations . put ( resource . getRootPath ( ) , relations ) ; }
|
public class GeometryConverterService { /** * Convert a JTS geometry to a Geomajas geometry .
* @ param geometry JTS geometry
* @ return Geomajas geometry
* @ throws JtsConversionException conversion failed */
public static Geometry fromJts ( com . vividsolutions . jts . geom . Geometry geometry ) throws JtsConversionException { } }
|
if ( geometry == null ) { throw new JtsConversionException ( "Cannot convert null argument" ) ; } int srid = geometry . getSRID ( ) ; int precision = - 1 ; PrecisionModel precisionmodel = geometry . getPrecisionModel ( ) ; if ( ! precisionmodel . isFloating ( ) ) { precision = ( int ) Math . log10 ( precisionmodel . getScale ( ) ) ; } String geometryType = getGeometryType ( geometry ) ; Geometry dto = new Geometry ( geometryType , srid , precision ) ; if ( geometry . isEmpty ( ) ) { // nothing to do
} else if ( geometry instanceof Point ) { dto . setCoordinates ( convertCoordinates ( geometry ) ) ; } else if ( geometry instanceof LinearRing ) { dto . setCoordinates ( convertCoordinates ( geometry ) ) ; } else if ( geometry instanceof LineString ) { dto . setCoordinates ( convertCoordinates ( geometry ) ) ; } else if ( geometry instanceof Polygon ) { Polygon polygon = ( Polygon ) geometry ; Geometry [ ] geometries = new Geometry [ polygon . getNumInteriorRing ( ) + 1 ] ; for ( int i = 0 ; i < geometries . length ; i ++ ) { if ( i == 0 ) { geometries [ i ] = fromJts ( polygon . getExteriorRing ( ) ) ; } else { geometries [ i ] = fromJts ( polygon . getInteriorRingN ( i - 1 ) ) ; } } dto . setGeometries ( geometries ) ; } else if ( geometry instanceof MultiPoint ) { dto . setGeometries ( convertGeometries ( geometry ) ) ; } else if ( geometry instanceof MultiLineString ) { dto . setGeometries ( convertGeometries ( geometry ) ) ; } else if ( geometry instanceof MultiPolygon ) { dto . setGeometries ( convertGeometries ( geometry ) ) ; } else { throw new JtsConversionException ( "Cannot convert geometry: Unsupported type." ) ; } return dto ;
|
public class FileSystemAccess { /** * throws an IOException if the current FileSystem isn ' t working */
private FileSystem getFileSystemSafe ( ) throws IOException { } }
|
try { fs . getFileStatus ( new Path ( "/" ) ) ; return fs ; } catch ( NullPointerException e ) { throw new IOException ( "file system not initialized" ) ; }
|
public class ParameterUtil { /** * Fetches the supplied parameter from the request and converts it to an integer . If the
* parameter does not exist or is not a well - formed integer , a data validation exception is
* thrown with the supplied message . */
public static int requireIntParameter ( HttpServletRequest req , String name , String invalidDataMessage ) throws DataValidationException { } }
|
return parseIntParameter ( getParameter ( req , name , false ) , invalidDataMessage ) ;
|
public class ParameterBuilder { /** * Copy builder
* @ param other parameter to copy from
* @ return this */
ParameterBuilder from ( Parameter other ) { } }
|
return name ( other . getName ( ) ) . allowableValues ( other . getAllowableValues ( ) ) . allowMultiple ( other . isAllowMultiple ( ) ) . defaultValue ( other . getDefaultValue ( ) ) . description ( other . getDescription ( ) ) . modelRef ( other . getModelRef ( ) ) . parameterAccess ( other . getParamAccess ( ) ) . parameterType ( other . getParamType ( ) ) . required ( other . isRequired ( ) ) . type ( other . getType ( ) . orElse ( null ) ) . hidden ( other . isHidden ( ) ) . allowEmptyValue ( other . isAllowEmptyValue ( ) ) . order ( other . getOrder ( ) ) . vendorExtensions ( other . getVendorExtentions ( ) ) ;
|
public class PathToObjectIdentifiersMarshaller { /** * Marshall the given parameter object . */
public void marshall ( PathToObjectIdentifiers pathToObjectIdentifiers , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( pathToObjectIdentifiers == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( pathToObjectIdentifiers . getPath ( ) , PATH_BINDING ) ; protocolMarshaller . marshall ( pathToObjectIdentifiers . getObjectIdentifiers ( ) , OBJECTIDENTIFIERS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class CmsFileTable { /** * Applies settings generally used within workplace app file lists . < p > */
public void applyWorkplaceAppSettings ( ) { } }
|
// add site path property to container
m_container . addContainerProperty ( CmsResourceTableProperty . PROPERTY_SITE_PATH , CmsResourceTableProperty . PROPERTY_SITE_PATH . getColumnType ( ) , CmsResourceTableProperty . PROPERTY_SITE_PATH . getDefaultValue ( ) ) ; // replace the resource name column with the path column
Object [ ] visibleCols = m_fileTable . getVisibleColumns ( ) ; for ( int i = 0 ; i < visibleCols . length ; i ++ ) { if ( CmsResourceTableProperty . PROPERTY_RESOURCE_NAME . equals ( visibleCols [ i ] ) ) { visibleCols [ i ] = CmsResourceTableProperty . PROPERTY_SITE_PATH ; } } m_fileTable . setVisibleColumns ( visibleCols ) ; m_fileTable . setColumnCollapsible ( CmsResourceTableProperty . PROPERTY_SITE_PATH , false ) ; m_fileTable . setColumnHeader ( CmsResourceTableProperty . PROPERTY_SITE_PATH , CmsVaadinUtils . getMessageText ( CmsResourceTableProperty . PROPERTY_SITE_PATH . getHeaderKey ( ) ) ) ; // update column visibility according to the latest file explorer settings
CmsFileExplorerSettings settings ; try { settings = OpenCms . getWorkplaceAppManager ( ) . getAppSettings ( A_CmsUI . getCmsObject ( ) , CmsFileExplorerSettings . class ) ; setTableState ( settings ) ; } catch ( Exception e ) { LOG . error ( "Error while reading file explorer settings from user." , e ) ; } m_fileTable . setSortContainerPropertyId ( CmsResourceTableProperty . PROPERTY_SITE_PATH ) ; setActionColumnProperty ( CmsResourceTableProperty . PROPERTY_SITE_PATH ) ; setMenuBuilder ( new CmsResourceContextMenuBuilder ( ) ) ;
|
public class SysLibraryLoader {

    /**
     * Loads a shared library via {@link System#loadLibrary(String)}.
     *
     * @param name   name of the library to load
     * @param verify ignored; no verification is performed
     * @return {@code true} if the library was successfully loaded, {@code false} otherwise
     */
    public boolean load(String name, boolean verify) {
        try {
            System.loadLibrary(name);
            return true;
        } catch (Throwable t) {
            // Any failure (UnsatisfiedLinkError, SecurityException, ...) is
            // reported as "not loaded" rather than propagated.
            return false;
        }
    }
}
|
public class FeaturePainter { /** * The actual painting function . Draws the circles with the object ' s id .
 * @ param paintable
 * A { @ link org . geomajas . gwt . client . gfx . paintable . Text } object .
 * @ param group
 * The group where the object resides in ( optional ) .
 * @ param context
 * A MapContext object , responsible for actual drawing . */
public void paint ( Paintable paintable , Object group , MapContext context ) { } }
|
// Renders the feature's geometry into the layer's selection group: transforms the
// geometry from world to pan space, builds a selection style, then dispatches on
// the geometry layer type (line / multiline / polygon / multipolygon / point /
// multipoint), drawing each part under the id "<layerId>-<featureId>[.<i>]".
// Point geometries with an image symbol use the "<styleId>-selection" symbol.
// NOTE(review): despite the javadoc summary, this draws lines/polygons/symbols
// for the feature geometry, not id-labelled circles — the summary looks stale.
if ( paintable != null ) { Feature feature = ( Feature ) paintable ; WorldViewTransformer worldViewTransformer = feature . getLayer ( ) . getMapModel ( ) . getMapView ( ) . getWorldViewTransformer ( ) ; Geometry geometry = worldViewTransformer . worldToPan ( feature . getGeometry ( ) ) ; ShapeStyle style = createStyleForFeature ( feature ) ; PaintableGroup selectionGroup = feature . getLayer ( ) . getSelectionGroup ( ) ; context . getVectorContext ( ) . drawGroup ( selectionGroup , feature ) ; String name = feature . getLayer ( ) . getId ( ) + "-" + feature . getId ( ) ; switch ( geometry . getLayerType ( ) ) { case LINESTRING : context . getVectorContext ( ) . drawLine ( feature , name , ( LineString ) geometry , style ) ; break ; case MULTILINESTRING : MultiLineString mls = ( MultiLineString ) geometry ; for ( int i = 0 ; i < mls . getNumGeometries ( ) ; i ++ ) { context . getVectorContext ( ) . drawLine ( feature , name + "." + i , ( LineString ) mls . getGeometryN ( i ) , style ) ; } break ; case POLYGON : context . getVectorContext ( ) . drawPolygon ( feature , name , ( Polygon ) geometry , style ) ; break ; case MULTIPOLYGON : MultiPolygon mp = ( MultiPolygon ) geometry ; for ( int i = 0 ; i < mp . getNumGeometries ( ) ; i ++ ) { context . getVectorContext ( ) . drawPolygon ( feature , name + "." + i , ( Polygon ) mp . getGeometryN ( i ) , style ) ; } break ; case POINT : if ( hasImageSymbol ( feature ) ) { context . getVectorContext ( ) . drawSymbol ( feature , name , geometry . getCoordinate ( ) , null , feature . getStyleId ( ) + "-selection" ) ; } else { context . getVectorContext ( ) . drawSymbol ( feature , name , geometry . getCoordinate ( ) , style , feature . getStyleId ( ) ) ; } break ; case MULTIPOINT : Coordinate [ ] coordinates = geometry . getCoordinates ( ) ; if ( hasImageSymbol ( feature ) ) { for ( int i = 0 ; i < coordinates . length ; i ++ ) { context . getVectorContext ( ) . drawSymbol ( feature , name + "." 
+ i , coordinates [ i ] , null , feature . getStyleId ( ) + "-selection" ) ; } } else { for ( int i = 0 ; i < coordinates . length ; i ++ ) { context . getVectorContext ( ) . drawSymbol ( feature , name + "." + i , coordinates [ i ] , style , feature . getStyleId ( ) ) ; } } break ; default : throw new IllegalStateException ( "Cannot draw feature with Geometry type " + geometry . getLayerType ( ) ) ; } }
|
public class LocalTranCoordImpl { /** * Enlists the provided < CODE > resource < / CODE >
 * object with the target < CODE > LocalTransactionCoordinator < / CODE > in order
 * that the resource be coordinated by the LTC .
 * The < code > resource < / code > is called to < code > start < / code > as part of the enlist
 * processing and will be called to < code > commit < / code > or < code > rollback < / code >
 * when the LTC completes .
 * The boundary at which the local transaction containment will
 * be completed is set at deployment time using the
 * < CODE > boundary < / CODE > descriptor .
 * < PRE >
 * & lt ; local - transaction >
 * & lt ; boundary > ActivitySession | BeanMethod & lt ; / boundary >
 * & lt ; / local - transaction >
 * < / PRE >
 * @ param resource The < CODE > OnePhaseXAResource < / CODE > to coordinate
 * @ exception IllegalStateException
 * Thrown if the LocalTransactionCoordinator is not in a
 * valid state to execute the operation , for example if
 * a global transaction is active or if a resource has been
 * elisted for < b > cleanup < / b > in this LTC scope . */
public void enlist ( OnePhaseXAResource resource ) throws IllegalStateException , LTCSystemException { } }
|
// Flow: (1) validate that this LTC can accept an enlistment — not being used for
// cleanup, not marked rollback-only, resource non-null, and state Running or
// Suspended; (2) start the resource's local transaction via resource.start();
// (3) record the resource in _enlistedResources so it is driven to completion
// when the LTC ends. The first successful enlistment lazily creates the list,
// loads the LTC component metadata, and starts performance timing.
// Validation failures raise IllegalStateException (with FFDC + Tr error logging);
// an XAException from start() is wrapped in LTCSystemException.
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "enlist" , resource ) ; /* Rollup of MD19518
if ( ( _ current = = null ) | | ( _ current . globalTranExists ( ) ) )
IllegalStateException ise = new IllegalStateException ( " Cannot enlist Resource . A Global transaction is active . " ) ;
FFDCFilter . processException ( ise , " com . ibm . tx . ltc . LocalTranCoordImpl . enlist " , " 326 " , this ) ;
Tr . error ( tc , " ERR _ ENLIST _ TX _ GLB _ ACT " ) ;
if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , " enlist " , ise ) ;
throw ise ;
} end rollup of MD19518 */
if ( _cleanupResources != null ) { final IllegalStateException ise = new IllegalStateException ( "Cannot enlist Resource. This LTC scope is being used for cleanup." ) ; FFDCFilter . processException ( ise , "com.ibm.tx.ltc.LocalTranCoordImpl.enlist" , "335" , this ) ; Tr . error ( tc , "ERR_ENLIST_TX_CLEANUP" ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "enlist" , ise ) ; throw ise ; } if ( _rollbackOnly ) { final IllegalStateException ise = new IllegalStateException ( "Cannot enlist Resource. LocalTransaction is marked RollbackOnly." ) ; FFDCFilter . processException ( ise , "com.ibm.tx.ltc.LocalTranCoordImpl.enlist" , "344" , this ) ; Tr . error ( tc , "ERR_STATE_RB_ONLY" ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "enlist" , ise ) ; throw ise ; } // d131059.1 IllegalState matches global transaction behaviour
if ( resource == null ) { final IllegalStateException ise = new IllegalStateException ( "enlist failed. Resource specified was null." ) ; FFDCFilter . processException ( ise , "com.ibm.tx.ltc.LocalTranCoordImpl.enlist" , "354" , this ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "enlist" , ise ) ; throw ise ; } if ( ( _state != Running ) && ( _state != Suspended ) ) { final IllegalStateException ise = new IllegalStateException ( "Cannot enlist Resource. LocalTransaction is completing or completed." ) ; FFDCFilter . processException ( ise , "com.ibm.tx.ltc.LocalTranCoordImpl.enlist" , "362" , this ) ; Tr . error ( tc , "ERR_ENLIST_LTC_COMPLETE" ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "enlist" , ise ) ; throw ise ; } // Need to begin the LocalTransaction
// by calling the resource .
try { resource . start ( null , 0 ) ; // Finally , need to add the resource to our internal collection
// so that it can be driven to completion when necessary .
if ( _enlistedResources == null ) { _enlistedResources = new ArrayList < OnePhaseXAResource > ( ) ; // Now we need to get the rest of the LTC config to see how to end the LTC
getComponentMetadataForLTC ( ) ; // This is the first resource ( local transaction ) to be enlisted with the LTC .
// Record this with the performance monitor as the start point for the
// ' LocalTransaction ' . In fact multiple resources local transactions can become
// involved with the LTC and we are going to track them collectively rather
// than individually .
startTime = perfStarted ( ) ; } _enlistedResources . add ( resource ) ; } catch ( XAException xe ) { FFDCFilter . processException ( xe , "com.ibm.tx.ltc.LocalTranCoordImpl.enlist" , "232" , this ) ; Tr . error ( tc , "ERR_XA_RESOURCE_START" , new Object [ ] { resource . getResourceName ( ) , xe } ) ; // Raise an exception to indicate failure
final LTCSystemException ltcse = new LTCSystemException ( "Resource enlistment failed due to failure in xa_start." ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "enlist" , ltcse ) ; throw ltcse ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "enlist" ) ;
|
public class BDDFactory { /** * Returns the model count of a given BDD with a given number of unimportant variables .
* @ param bdd the BDD
* @ param unimportantVars the number of unimportant variables
* @ return the model count */
public BigDecimal modelCount ( final BDD bdd , final int unimportantVars ) { } }
|
return modelCount ( bdd ) . divide ( BigDecimal . valueOf ( ( int ) Math . pow ( 2 , unimportantVars ) ) ) ;
|
public class MailAttrProcessor { /** * Added more information to the urlbuilder .
* @ param urlBuilder an urlbuilder .
* @ param name the name of the attribute .
* @ param value the value . */
private void addEncodedValue ( final StringBuilder urlBuilder , final String name , String value ) { } }
|
if ( value != null ) { String encodedValue ; if ( ! "to" . equals ( name ) && ! urlBuilder . toString ( ) . endsWith ( "?" ) ) { urlBuilder . append ( '&' ) ; } try { encodedValue = URLEncoder . encode ( value , "utf-8" ) . replace ( "+" , "%20" ) ; } catch ( UnsupportedEncodingException e ) { LOG . error ( "UTF-8 encoding not supported during encoding " + value , e ) ; encodedValue = value ; } if ( ! "to" . equals ( name ) ) { urlBuilder . append ( name ) . append ( '=' ) ; } urlBuilder . append ( encodedValue ) ; }
|
public class SqlQuery { /** * Constructs a query with named arguments , using the properties / fields of given bean for resolving arguments .
* @ see # namedQuery ( String , VariableResolver )
* @ see VariableResolver # forBean ( Object ) */
public static @ NotNull SqlQuery namedQuery ( @ NotNull @ SQL String sql , @ NotNull Object bean ) { } }
|
return namedQuery ( sql , VariableResolver . forBean ( bean ) ) ;
|
public class Segment { /** * Reads the term for the entry at the given index .
* @ param index The index for which to read the term .
* @ return The term for the given index .
* @ throws IllegalStateException if the segment is not open or { @ code index } is inconsistent */
public long term ( long index ) { } }
|
assertSegmentOpen ( ) ; checkRange ( index ) ; // Get the offset of the index within this segment .
long offset = relativeOffset ( index ) ; // Look up the term for the offset in the term index .
return termIndex . lookup ( offset ) ;
|
public class ScaleInfo { /** * Sets the scale value in pixel per map unit .
* @ param pixelPerUnit
* the scale value ( pix / map unit ) */
public void setPixelPerUnit ( double pixelPerUnit ) { } }
|
if ( pixelPerUnit < MINIMUM_PIXEL_PER_UNIT ) { pixelPerUnit = MINIMUM_PIXEL_PER_UNIT ; } if ( pixelPerUnit > MAXIMUM_PIXEL_PER_UNIT ) { pixelPerUnit = MAXIMUM_PIXEL_PER_UNIT ; } this . pixelPerUnit = pixelPerUnit ; setPixelPerUnitBased ( true ) ; postConstruct ( ) ;
|
public class RunnersApi { /** * Get a Stream of jobs that are being processed or were processed by specified Runner .
* < pre > < code > GitLab Endpoint : GET / runners / : id / jobs < / code > < / pre >
* @ param runnerId The ID of a runner
* @ return a Stream of jobs that are being processed or were processed by specified Runner
* @ throws GitLabApiException if any exception occurs */
public Stream < Job > getJobsStream ( Integer runnerId ) throws GitLabApiException { } }
|
return ( getJobs ( runnerId , null , getDefaultPerPage ( ) ) . stream ( ) ) ;
|
public class BasicUserProfile { /** * Return the authentication attribute with name .
* @ param name authentication attribute name
* @ return the authentication attribute with name */
public Object getAuthenticationAttribute ( final String name ) { } }
|
return ProfileHelper . getInternalAttributeHandler ( ) . restore ( this . authenticationAttributes . get ( name ) ) ;
|
public class InternalSARLParser { /** * $ ANTLR start synpred44 _ InternalSARL */
public final void synpred44_InternalSARL_fragment ( ) throws RecognitionException { } }
|
// ANTLR-generated syntactic predicate — do not hand-edit; regenerate from the
// grammar instead. It speculatively matches an optional comma-separated list of
// JvmFormalParameter followed by a '|' (lambda parameter list prefix).
// InternalSARL . g : 13520:8 : ( ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) ) )
// InternalSARL . g : 13520:9 : ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) )
{ // InternalSARL . g : 13520:9 : ( ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) ) )
// InternalSARL . g : 13521:9 : ( ) ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ? ( ( ' | ' ) )
{ // InternalSARL . g : 13521:9 : ( )
// InternalSARL . g : 13522:9:
{ } // InternalSARL . g : 13523:9 : ( ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) * ) ?
int alt396 = 2 ; int LA396_0 = input . LA ( 1 ) ; if ( ( LA396_0 == RULE_ID || ( LA396_0 >= 44 && LA396_0 <= 45 ) || ( LA396_0 >= 92 && LA396_0 <= 95 ) ) ) { alt396 = 1 ; } switch ( alt396 ) { case 1 : // InternalSARL . g : 13524:10 : ( ( ruleJvmFormalParameter ) ) ( ' , ' ( ( ruleJvmFormalParameter ) ) ) *
{ // InternalSARL . g : 13524:10 : ( ( ruleJvmFormalParameter ) )
// InternalSARL . g : 13525:11 : ( ruleJvmFormalParameter )
{ // InternalSARL . g : 13525:11 : ( ruleJvmFormalParameter )
// InternalSARL . g : 13526:12 : ruleJvmFormalParameter
{ pushFollow ( FOLLOW_134 ) ; ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return ; } } // InternalSARL . g : 13529:10 : ( ' , ' ( ( ruleJvmFormalParameter ) ) ) *
loop395 : do { int alt395 = 2 ; int LA395_0 = input . LA ( 1 ) ; if ( ( LA395_0 == 32 ) ) { alt395 = 1 ; } switch ( alt395 ) { case 1 : // InternalSARL . g : 13530:11 : ' , ' ( ( ruleJvmFormalParameter ) )
{ match ( input , 32 , FOLLOW_75 ) ; if ( state . failed ) return ; // InternalSARL . g : 13531:11 : ( ( ruleJvmFormalParameter ) )
// InternalSARL . g : 13532:12 : ( ruleJvmFormalParameter )
{ // InternalSARL . g : 13532:12 : ( ruleJvmFormalParameter )
// InternalSARL . g : 13533:13 : ruleJvmFormalParameter
{ pushFollow ( FOLLOW_134 ) ; ruleJvmFormalParameter ( ) ; state . _fsp -- ; if ( state . failed ) return ; } } } break ; default : break loop395 ; } } while ( true ) ; } break ; } // InternalSARL . g : 13538:9 : ( ( ' | ' ) )
// InternalSARL . g : 13539:10 : ( ' | ' )
{ // InternalSARL . g : 13539:10 : ( ' | ' )
// InternalSARL . g : 13540:11 : ' | '
{ match ( input , 97 , FOLLOW_2 ) ; if ( state . failed ) return ; } } } }
|
public class OHLCChart { /** * Add a series for a OHLC type chart using Lists
* @ param seriesName
* @ param xData the x - axis data
* @ param openData the open data
* @ param highData the high data
* @ param lowData the low data
* @ param closeData the close data
* @ return A Series object that you can set properties on */
public OHLCSeries addSeries ( String seriesName , List < ? > xData , List < ? extends Number > openData , List < ? extends Number > highData , List < ? extends Number > lowData , List < ? extends Number > closeData ) { } }
|
DataType dataType = getDataType ( xData ) ; switch ( dataType ) { case Date : return addSeries ( seriesName , Utils . getDoubleArrayFromDateList ( xData ) , Utils . getDoubleArrayFromNumberList ( openData ) , Utils . getDoubleArrayFromNumberList ( highData ) , Utils . getDoubleArrayFromNumberList ( lowData ) , Utils . getDoubleArrayFromNumberList ( closeData ) , DataType . Date ) ; default : return addSeries ( seriesName , Utils . getDoubleArrayFromNumberList ( xData ) , Utils . getDoubleArrayFromNumberList ( openData ) , Utils . getDoubleArrayFromNumberList ( highData ) , Utils . getDoubleArrayFromNumberList ( lowData ) , Utils . getDoubleArrayFromNumberList ( closeData ) , DataType . Number ) ; }
|
public class A_CmsTreeTabDataPreloader { /** * Gets the common ancestor of two paths . < p >
* @ param rootPath1 the first path
* @ param rootPath2 the second path
* @ return the common ancestor path */
private String getCommonAncestorPath ( String rootPath1 , String rootPath2 ) { } }
|
if ( rootPath1 == null ) { return rootPath2 ; } if ( rootPath2 == null ) { return rootPath1 ; } rootPath1 = CmsStringUtil . joinPaths ( "/" , rootPath1 , "/" ) ; rootPath2 = CmsStringUtil . joinPaths ( "/" , rootPath2 , "/" ) ; int minLength = Math . min ( rootPath1 . length ( ) , rootPath2 . length ( ) ) ; int i ; for ( i = 0 ; i < minLength ; i ++ ) { char char1 = rootPath1 . charAt ( i ) ; char char2 = rootPath2 . charAt ( i ) ; if ( char1 != char2 ) { break ; } } String result = rootPath1 . substring ( 0 , i ) ; if ( "/" . equals ( result ) ) { return result ; } int slashIndex = result . lastIndexOf ( '/' ) ; result = result . substring ( 0 , slashIndex ) ; return result ;
|
public class SparseHashDoubleVector { /** * { @ inheritDoc } */
public void set ( int index , double value ) { } }
|
double old = vector . get ( index ) ; if ( value == 0 ) vector . remove ( index ) ; else vector . put ( index , value ) ; magnitude = - 1 ;
|
public class SQLCaster { /** * cast a Value to a correspondance CF Type
* @ param item
* @ return cf type
* @ throws PageException */
public static Object toCFTypex ( SQLItem item ) throws PageException { } }
|
try { return _toCFTypex ( item ) ; } catch ( PageException e ) { if ( item . isNulls ( ) ) return item . getValue ( ) ; throw e ; }
|
public class NumberUtil { /** * Converts a hexadecimal String to a Long .
 * Throws NumberFormatException when str is null or not a numeric string . */
public static Long hexToLongObject ( @ NotNull String str ) { } }
|
// Uniform behavior : always throw NumberFormatException instead of sometimes NPE , sometimes NumberFormatException .
// NOTE(review): Long.decode() only parses the input as hex when it carries a
// 0x / 0X / # prefix ; an unprefixed string is parsed as decimal — confirm this
// matches the documented "hex to Long" intent .
if ( str == null ) { throw new NumberFormatException ( "null" ) ; } return Long . decode ( str ) ;
|
public class FileSystemManager { /** * Gets the first writable directory that exists or can be created .
* @ param directories The directories to try
* @ return A File object that represents a writable directory .
* @ throws FileNotFoundException Thrown if no directory can be written to . */
public static File getWritableDirectoryWithFailovers ( String ... directories ) throws FileNotFoundException { } }
|
File logDir = null ; for ( String directory : directories ) { if ( directory != null ) { try { logDir = ensureDirectoryWriteable ( new File ( directory ) ) ; } catch ( FileNotFoundException e ) { log . debug ( "Failed to get writeable directory: " + directory , e ) ; continue ; } break ; } } if ( logDir == null ) { throw new FileNotFoundException ( "Could not get a writeable directory!" ) ; } return logDir ;
|
public class Log { /** * Returns true if an error needs to be reported for a given
* source name and pos . */
protected boolean shouldReport ( JavaFileObject file , int pos ) { } }
|
if ( file == null ) return true ; Pair < JavaFileObject , Integer > coords = new Pair < > ( file , pos ) ; boolean shouldReport = ! recorded . contains ( coords ) ; if ( shouldReport ) recorded . add ( coords ) ; return shouldReport ;
|
public class StandardBullhornData { /** * { @ inheritDoc } */
@ Override public < T extends QueryEntity > EntityIdBoundaries queryForIdBoundaries ( Class < T > entityClass ) { } }
|
return handleQueryForIdBoundaries ( entityClass ) ;
|
public class Distance { /** * Gets the Manhattan distance between two points .
* @ param p IntPoint with X and Y axis coordinates .
* @ param q IntPoint with X and Y axis coordinates .
* @ return The Manhattan distance between x and y . */
public static double Manhattan ( IntPoint p , IntPoint q ) { } }
|
return Manhattan ( p . x , p . y , q . x , q . y ) ;
|
public class AuxiliaryTree { /** * returns Pair < node , foot > */
private Pair < Tree , Tree > copyHelper ( Tree node , Map < String , Tree > newNamesToNodes ) { } }
|
// Recursively deep-copies the subtree rooted at 'node'. Returns the copied node
// together with the copied foot node if the foot was found in this subtree
// (null otherwise); the foot is propagated upward through the recursion. Named
// nodes are registered in newNamesToNodes as they are cloned. Finding a second
// foot is reported to stderr and the last one found wins.
Tree clone ; Tree newFoot = null ; if ( node . isLeaf ( ) ) { if ( node == foot ) { // found the foot node ; pass it up .
clone = node . treeFactory ( ) . newTreeNode ( node . label ( ) , new ArrayList < Tree > ( 0 ) ) ; newFoot = clone ; } else { clone = node . treeFactory ( ) . newLeaf ( node . label ( ) . labelFactory ( ) . newLabel ( node . label ( ) ) ) ; } } else { List < Tree > newChildren = new ArrayList < Tree > ( node . children ( ) . length ) ; for ( Tree child : node . children ( ) ) { Pair < Tree , Tree > newChild = copyHelper ( child , newNamesToNodes ) ; newChildren . add ( newChild . first ( ) ) ; if ( newChild . second ( ) != null ) { if ( newFoot != null ) { System . err . println ( "Error -- two feet found when copying auxiliary tree " + tree . toString ( ) + "; using last foot found." ) ; } newFoot = newChild . second ( ) ; } } clone = node . treeFactory ( ) . newTreeNode ( node . label ( ) . labelFactory ( ) . newLabel ( node . label ( ) ) , newChildren ) ; if ( nodesToNames . containsKey ( node ) ) { newNamesToNodes . put ( nodesToNames . get ( node ) , clone ) ; } } return new Pair < Tree , Tree > ( clone , newFoot ) ;
|
public class TrainModule { /** * Extract session data from { @ link StatsStorage }
 * @ param sid session ID
 * @ param ss { @ code StatsStorage } instance
 * @ return session data map */
private static Map < String , Object > sessionData ( String sid , StatsStorage ss ) { } }
|
// Builds a per-session overview map: worker count, earliest static-info
// timestamp ("initTime"), latest update timestamp ("lastUpdate"), the worker ID
// list (when non-empty), and model metadata (type / layer count / param count)
// taken from the first static-info record. Missing values are rendered as
// empty strings so the map is always fully populated for the UI.
Map < String , Object > dataThisSession = new HashMap < > ( ) ; List < String > workerIDs = ss . listWorkerIDsForSessionAndType ( sid , StatsListener . TYPE_ID ) ; int workerCount = ( workerIDs == null ? 0 : workerIDs . size ( ) ) ; List < Persistable > staticInfo = ss . getAllStaticInfos ( sid , StatsListener . TYPE_ID ) ; long initTime = Long . MAX_VALUE ; if ( staticInfo != null ) { for ( Persistable p : staticInfo ) { initTime = Math . min ( p . getTimeStamp ( ) , initTime ) ; } } long lastUpdateTime = Long . MIN_VALUE ; List < Persistable > lastUpdatesAllWorkers = ss . getLatestUpdateAllWorkers ( sid , StatsListener . TYPE_ID ) ; for ( Persistable p : lastUpdatesAllWorkers ) { lastUpdateTime = Math . max ( lastUpdateTime , p . getTimeStamp ( ) ) ; } dataThisSession . put ( "numWorkers" , workerCount ) ; dataThisSession . put ( "initTime" , initTime == Long . MAX_VALUE ? "" : initTime ) ; dataThisSession . put ( "lastUpdate" , lastUpdateTime == Long . MIN_VALUE ? "" : lastUpdateTime ) ; // add hashmap of workers
if ( workerCount > 0 ) { dataThisSession . put ( "workers" , workerIDs ) ; } // Model info : type , # layers , # params . . .
if ( staticInfo != null && ! staticInfo . isEmpty ( ) ) { StatsInitializationReport sr = ( StatsInitializationReport ) staticInfo . get ( 0 ) ; String modelClassName = sr . getModelClassName ( ) ; if ( modelClassName . endsWith ( "MultiLayerNetwork" ) ) { modelClassName = "MultiLayerNetwork" ; } else if ( modelClassName . endsWith ( "ComputationGraph" ) ) { modelClassName = "ComputationGraph" ; } int numLayers = sr . getModelNumLayers ( ) ; long numParams = sr . getModelNumParams ( ) ; dataThisSession . put ( "modelType" , modelClassName ) ; dataThisSession . put ( "numLayers" , numLayers ) ; dataThisSession . put ( "numParams" , numParams ) ; } else { dataThisSession . put ( "modelType" , "" ) ; dataThisSession . put ( "numLayers" , "" ) ; dataThisSession . put ( "numParams" , "" ) ; } return dataThisSession ;
|
public class ReferenceAccessController { /** * { @ inheritDoc } */
@ Override public boolean isAuthorized ( ClientApplication clientApplication , Action action , Context context ) { } }
|
boolean authorized = false ; if ( clientApplication != null && action != null && action . toString ( ) != null && action . toString ( ) . trim ( ) . length ( ) > 0 ) { for ( Role role : clientApplication . getRoles ( ) ) { // simple check to make sure that
// the value of the action matches the value of one of the roles ( exact match )
if ( role != null && role . toString ( ) != null && role . toString ( ) . equals ( action . toString ( ) ) ) { authorized = true ; break ; } } } return authorized ;
|
public class NumberPath { /** * Method to construct the less than or equals expression for long
* @ param value the long
* @ return Expression */
public Expression < Long > lte ( long value ) { } }
|
String valueString = "'" + value + "'" ; return new Expression < Long > ( this , Operation . lte , valueString ) ;
|
public class BaseExtension { /** * Get the extension or create as needed
 * @ param extensionName
 * extension name
 * @ param tableName
 * table name
 * @ param columnName
 * column name
 * @ param definition
 * extension definition
 * @ param scopeType
 * extension scope type
 * @ return extension */
protected Extensions getOrCreate ( String extensionName , String tableName , String columnName , String definition , ExtensionScopeType scopeType ) { } }
|
// Looks up the extension row first; when absent, lazily creates the extensions
// table (if it does not exist yet) and inserts a fully-populated row. SQL
// failures are wrapped in a GeoPackageException carrying the GeoPackage name,
// table and column for context.
Extensions extension = get ( extensionName , tableName , columnName ) ; if ( extension == null ) { try { if ( ! extensionsDao . isTableExists ( ) ) { geoPackage . createExtensionsTable ( ) ; } extension = new Extensions ( ) ; extension . setTableName ( tableName ) ; extension . setColumnName ( columnName ) ; extension . setExtensionName ( extensionName ) ; extension . setDefinition ( definition ) ; extension . setScope ( scopeType ) ; extensionsDao . create ( extension ) ; } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to create '" + extensionName + "' extension for GeoPackage: " + geoPackage . getName ( ) + ", Table Name: " + tableName + ", Column Name: " + columnName , e ) ; } } return extension ;
|
public class aaapreauthenticationpolicy_vpnvserver_binding { /** * Use this API to fetch aaapreauthenticationpolicy _ vpnvserver _ binding resources of given name . */
public static aaapreauthenticationpolicy_vpnvserver_binding [ ] get ( nitro_service service , String name ) throws Exception { } }
|
aaapreauthenticationpolicy_vpnvserver_binding obj = new aaapreauthenticationpolicy_vpnvserver_binding ( ) ; obj . set_name ( name ) ; aaapreauthenticationpolicy_vpnvserver_binding response [ ] = ( aaapreauthenticationpolicy_vpnvserver_binding [ ] ) obj . get_resources ( service ) ; return response ;
|
public class SDVariable { /** * Sum array reduction operation , optionally along specified dimensions . < br >
* Note that if keepDims = true , the output variable has the same rank as the input variable ,
* with the reduced dimensions having size 1 . This can be useful for later broadcast operations ( such as subtracting
* the mean along a dimension ) . < br >
* Example : if input has shape [ a , b , c ] and dimensions = [ 1 ] then output has shape :
* keepDims = true : [ a , 1 , c ] < br >
* keepDims = false : [ a , c ]
* @ param name Output variable name
* @ param keepDims If true : keep the dimensions that are reduced on ( as length 1 ) . False : remove the reduction dimensions
* @ param dimensions Dimensions to reduce over . If dimensions are not specified , full array reduction is performed
* @ return Output variable : reduced array of rank ( input rank - num dimensions ) if keepDims = false , or
* of rank ( input rank ) if keepdims = true */
public SDVariable sum ( String name , boolean keepDims , int ... dimensions ) { } }
|
return sameDiff . sum ( name , this , keepDims , dimensions ) ;
|
public class SherlockAccountAuthenticatorActivity { /** * Sends the result or a Constants . ERROR _ CODE _ CANCELED error if a result isn ' t present . */
public void finish ( ) { } }
|
if ( mAccountAuthenticatorResponse != null ) { // send the result bundle back if set , otherwise send an error .
if ( mResultBundle != null ) { mAccountAuthenticatorResponse . onResult ( mResultBundle ) ; } else { mAccountAuthenticatorResponse . onError ( AccountManager . ERROR_CODE_CANCELED , "canceled" ) ; } mAccountAuthenticatorResponse = null ; } super . finish ( ) ;
|
public class DSet {

    /**
     * Compares the first comparable to the second. This is useful to avoid
     * type-safety warnings when dealing with the keys of {@link DSet.Entry} values.
     * The caller guarantees the two values are mutually comparable.
     */
    public static int compare(Comparable<?> c1, Comparable<?> c2) {
        @SuppressWarnings("unchecked")
        final int result = ((Comparable<Object>) c1).compareTo(c2);
        return result;
    }
}
|
public class MediaExceptionProcessor { /** * ( non - Javadoc )
* @ see
* com . microsoft . windowsazure . services . media . entityoperations . EntityContract
* # delete ( com . microsoft . windowsazure . services . media . entityoperations .
* EntityDeleteOperation )
* @ return operation - id if any otherwise null . */
@ Override public String delete ( EntityDeleteOperation deleter ) throws ServiceException { } }
|
try { return service . delete ( deleter ) ; } catch ( UniformInterfaceException e ) { throw processCatch ( new ServiceException ( e ) ) ; } catch ( ClientHandlerException e ) { throw processCatch ( new ServiceException ( e ) ) ; }
|
public class CachingOnDiskSemanticSpace { /** * { @ inheritDoc } If the word is in the semantic space , its vector will be
 * temporarily loaded into memory so that subsequent calls will not need to
 * go to disk . As memory pressure increases , the vector will be discarded .
 * @ throws IOError if any { @ code IOException } occurs when reading the data
 * from the underlying semantic space file . */
public synchronized Vector getVector ( String word ) { } }
|
// Serve from the in-memory cache when present; otherwise read from the backing
// space and cache the result (null results are not cached).
// NOTE(review): a cache hit is returned wrapped via Vectors.immutable(...), but a
// freshly loaded vector is returned unwrapped — the first caller gets a mutable
// vector, later callers an immutable view. Confirm this asymmetry is intended.
Vector vector = wordToVector . get ( word ) ; if ( vector != null ) return Vectors . immutable ( vector ) ; Vector v = backingSpace . getVector ( word ) ; if ( v != null ) wordToVector . put ( word , v ) ; return v ;
|
public class InternalXtextParser { /** * InternalXtext . g : 2016:1 : entryRuleParameterReference returns [ EObject current = null ] : iv _ ruleParameterReference = ruleParameterReference EOF ; */
public final EObject entryRuleParameterReference ( ) throws RecognitionException { } }
|
// Xtext/ANTLR-generated entry rule — do not hand-edit; regenerate from the
// grammar. Parses a complete ParameterReference followed by EOF and returns the
// resulting model object; recognition errors are recovered and skipped tokens
// appended per the standard generated error handling.
EObject current = null ; EObject iv_ruleParameterReference = null ; try { // InternalXtext . g : 2016:59 : ( iv _ ruleParameterReference = ruleParameterReference EOF )
// InternalXtext . g : 2017:2 : iv _ ruleParameterReference = ruleParameterReference EOF
{ newCompositeNode ( grammarAccess . getParameterReferenceRule ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_1 ) ; iv_ruleParameterReference = ruleParameterReference ( ) ; state . _fsp -- ; current = iv_ruleParameterReference ; match ( input , EOF , FollowSets000 . FOLLOW_2 ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
|
public class PropertyManager { /** * Replaces variables like { ENVIRONMENT } with their correct values , also
* handles clarifying obfuscated strings in the form
* KEY = [ [ [ xxxxx ] ] ] */
private String replaceVariables ( String inString ) { } }
|
if ( inString == null || inString == "" ) return inString ; String outString = inString ; // Replace $ ENVIRONMENT with the default environment
outString = stringReplace ( outString , "{ENVIRONMENT}" , defaultEnvironment ) ; // Go through the string and try to replace { . . . } escaped keys
String currentToken = "" ; String replacedString = "" ; boolean inToken = false ; for ( int i = 0 ; i < outString . length ( ) ; i ++ ) { final char currentChar = outString . charAt ( i ) ; if ( ! inToken && currentChar != '{' ) { replacedString = replacedString + currentChar ; } else if ( inToken && currentChar == '{' ) { replacedString += "{" + currentToken ; currentToken = "" ; } else if ( inToken && currentChar != '}' ) { currentToken = currentToken + currentChar ; } else if ( inToken && currentChar == '}' ) { // Try to find a replacement
inToken = false ; String replacement = null ; if ( singletonInstance != null ) { replacement = PropertyManager . getProperty ( "" , currentToken , null ) ; } if ( replacement != null ) { replacedString += replacement ; } else { replacedString += "{" + currentToken + "}" ; } currentToken = "" ; } else if ( ! inToken && currentChar == '{' ) { inToken = true ; currentToken = "" ; } } if ( ! currentToken . equals ( "" ) ) { replacedString += "{" + currentToken ; } outString = replacedString ; // If in = = out , then we ' ve done as many replacements as we can , we ' re done
if ( outString . equals ( inString ) ) { // Finally , check to see if the string should be clarified
if ( inString . startsWith ( "[[[" ) && inString . endsWith ( "]]]" ) ) { // Clarify . . .
final String obfuscatedText = inString . substring ( 3 , inString . length ( ) - 3 ) ; inString = ObfuscationManager . clarify ( obfuscatedText ) ; } return inString ; } return replaceVariables ( outString ) ;
|
public class SwapAnnuity { /** * Function to calculate an ( idealized ) single curve swap annuity for a given schedule and forward curve .
* The discount curve used to calculate the annuity is calculated from the forward curve using classical
* single curve interpretations of forwards and a default period length . The may be a crude approximation .
* Note : This method will consider evaluationTime being 0 , see { @ link net . finmath . marketdata . products . SwapAnnuity # getSwapAnnuity ( double , Schedule , DiscountCurve , AnalyticModel ) } .
* @ param schedule The schedule discretization , i . e . , the period start and end dates . End dates are considered payment dates and start of the next period .
* @ param forwardCurve The forward curve .
* @ return The swap annuity . */
public static double getSwapAnnuity ( Schedule schedule , ForwardCurve forwardCurve ) { } }
|
DiscountCurve discountCurve = new DiscountCurveFromForwardCurve ( forwardCurve . getName ( ) ) ; double evaluationTime = 0.0 ; // Consider only payment time > 0
return getSwapAnnuity ( evaluationTime , schedule , discountCurve , new AnalyticModelFromCurvesAndVols ( new Curve [ ] { forwardCurve , discountCurve } ) ) ;
|
public class Version {
    /**
     * <p>Deduces version information from Data Matrix dimensions.</p>
     *
     * @param numRows Number of rows in modules
     * @param numColumns Number of columns in modules
     * @return Version for a Data Matrix Code of those dimensions
     * @throws FormatException if dimensions do not correspond to a valid Data Matrix size
     */
    public static Version getVersionForDimensions(int numRows, int numColumns) throws FormatException {
        // All valid Data Matrix symbol sizes have even row and column counts;
        // an odd dimension can never match.
        if ((numRows & 0x01) != 0 || (numColumns & 0x01) != 0) {
            throw FormatException.getFormatInstance();
        }
        // Linear scan of the known symbol sizes for an exact match.
        for (Version version : VERSIONS) {
            if (version.symbolSizeRows == numRows && version.symbolSizeColumns == numColumns) {
                return version;
            }
        }
        // Even dimensions, but not a defined symbol size.
        throw FormatException.getFormatInstance();
    }
}
|
public class Category { /** * Creates and defines all of the breadcrumbs for all of the categories .
* @ param categories the categories to create breadcrumbs for */
public void createBreadcrumbs ( List < Category > categories ) { } }
|
categories . forEach ( category -> { category . setBreadcrumb ( getBreadcrumb ( ) + BREADCRUMB_DELIMITER + category . getDescription ( ) ) ; if ( ! Objects . equals ( category . getGroups ( ) , null ) ) { category . getGroups ( ) . forEach ( group -> group . addToBreadcrumb ( getBreadcrumb ( ) ) ) ; } if ( ! Objects . equals ( category . getChildren ( ) , null ) ) { category . createBreadcrumbs ( category . getChildren ( ) ) ; } } ) ;
|
public class TagsApi { /** * Returns a list of most frequently used tags for a user .
* This method requires authentication with ' read ' permission .
* @ return most frequently used tags for the calling user .
* @ throws JinxException if there are any errors .
* @ see < a href = " https : / / www . flickr . com / services / api / flickr . tags . getMostFrequentlyUsed . html " > flickr . tags . getMostFrequentlyUsed < / a > */
public TagsForUser getMostFrequentlyUsed ( ) throws JinxException { } }
|
Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.tags.getMostFrequentlyUsed" ) ; return jinx . flickrGet ( params , TagsForUser . class ) ;
|
public class Rollbar { /** * Record a debug error with human readable description .
* @ param error the error
* @ param description human readable description of error */
public void debug ( Throwable error , String description ) { } }
|
log ( error , null , description , Level . DEBUG ) ;
|
public class ProponoException { /** * Print stack trace for exception and for root cause exception if htere is one .
* @ param s Writer to write to . */
@ Override public void printStackTrace ( final PrintWriter s ) { } }
|
super . printStackTrace ( s ) ; if ( null != mRootCause ) { s . println ( "--- ROOT CAUSE ---" ) ; mRootCause . printStackTrace ( s ) ; }
|
public class UResourceBundle { /** * < strong > [ icu ] < / strong > Creates a UResourceBundle , from which users can extract resources by using
* their corresponding keys . < br > < br >
* Note : Please use this API for loading non - ICU resources . Java security does not
* allow loading of resources across jar files . You must provide your class loader
* to load the resources
* @ param baseName string containing the name of the data package .
* If null the default ICU package name is used .
* @ param locale specifies the locale for which we want to open the resource .
* If null the bundle for default locale is opened .
* @ param loader the loader to use
* @ return a resource bundle for the given base name and locale */
public static UResourceBundle getBundleInstance ( String baseName , ULocale locale , ClassLoader loader ) { } }
|
if ( baseName == null ) { baseName = ICUData . ICU_BASE_NAME ; } if ( locale == null ) { locale = ULocale . getDefault ( ) ; } return getBundleInstance ( baseName , locale . getBaseName ( ) , loader , false ) ;
|
public class Matrix3x2f { /** * / * ( non - Javadoc )
* @ see org . joml . Matrix3x2fc # positiveX ( org . joml . Vector2f ) */
public Vector2f positiveX ( Vector2f dir ) { } }
|
float s = m00 * m11 - m01 * m10 ; s = 1.0f / s ; dir . x = m11 * s ; dir . y = - m01 * s ; return dir . normalize ( dir ) ;
|
public class DSLSAMLAuthenticationProvider { /** * Profile for consumption of processed messages , must be set .
* @ param consumer consumer */
@ Override @ Autowired ( required = false ) @ Qualifier ( "webSSOprofileConsumer" ) public void setConsumer ( WebSSOProfileConsumer consumer ) { } }
|
Assert . notNull ( consumer , "WebSSO Profile Consumer can't be null" ) ; this . consumer = consumer ;
|
public class HttpRequestHandlerAdapter { /** * parameter : channel - WebSockectEmulatedChannel */
public boolean isWebSocketClosing ( HttpRequest request ) { } }
|
Channel channel = getWebSocketChannel ( request ) ; if ( channel != null && channel . getParent ( ) != null ) { WebSocketCompositeChannel parent = ( WebSocketCompositeChannel ) channel . getParent ( ) ; if ( parent != null ) { return parent . getReadyState ( ) == ReadyState . CLOSED || parent . getReadyState ( ) == ReadyState . CLOSING ; } } return false ;
|
public class Smb2CreateResponse {
    /**
     * {@inheritDoc}
     *
     * <p>Decodes the fixed part of an SMB2 CREATE response (oplock level, create
     * action, the four FILETIME fields, sizes, attributes and the 16-byte file id)
     * and then any create contexts referenced by offset/length.</p>
     *
     * @throws SMBProtocolDecodingException if the structure size field is invalid
     * @see jcifs.internal.smb2.ServerMessageBlock2#readBytesWireFormat(byte[], int)
     */
    @Override
    protected int readBytesWireFormat(byte[] buffer, int bufferIndex) throws SMBProtocolDecodingException {
        int start = bufferIndex;
        // The fixed CREATE response structure size must be 89 — assumes MS-SMB2 2.2.14; TODO confirm.
        int structureSize = SMBUtil.readInt2(buffer, bufferIndex);
        if (structureSize != 89) {
            throw new SMBProtocolDecodingException("Structure size is not 89");
        }
        this.oplockLevel = buffer[bufferIndex + 2];
        this.openFlags = buffer[bufferIndex + 3];
        bufferIndex += 4;
        this.createAction = SMBUtil.readInt4(buffer, bufferIndex);
        bufferIndex += 4;
        // Four consecutive 8-byte timestamp fields.
        this.creationTime = SMBUtil.readTime(buffer, bufferIndex);
        bufferIndex += 8;
        this.lastAccessTime = SMBUtil.readTime(buffer, bufferIndex);
        bufferIndex += 8;
        this.lastWriteTime = SMBUtil.readTime(buffer, bufferIndex);
        bufferIndex += 8;
        this.changeTime = SMBUtil.readTime(buffer, bufferIndex);
        bufferIndex += 8;
        this.allocationSize = SMBUtil.readInt8(buffer, bufferIndex);
        bufferIndex += 8;
        this.endOfFile = SMBUtil.readInt8(buffer, bufferIndex);
        bufferIndex += 8;
        this.fileAttributes = SMBUtil.readInt4(buffer, bufferIndex);
        bufferIndex += 4;
        bufferIndex += 4; // Reserved2
        // 16-byte SMB2 file id handle.
        System.arraycopy(buffer, bufferIndex, this.fileId, 0, 16);
        bufferIndex += 16;
        // Offset (relative to the SMB2 header) and total length of the create contexts.
        int createContextOffset = SMBUtil.readInt4(buffer, bufferIndex);
        bufferIndex += 4;
        int createContextLength = SMBUtil.readInt4(buffer, bufferIndex);
        bufferIndex += 4;
        if (createContextOffset > 0 && createContextLength > 0) {
            List<CreateContextResponse> contexts = new LinkedList<>();
            int createContextStart = getHeaderStart() + createContextOffset;
            int next = 0;
            do {
                int cci = createContextStart;
                // Offset to the next chained context; 0 marks the last one.
                next = SMBUtil.readInt4(buffer, cci);
                cci += 4;
                int nameOffset = SMBUtil.readInt2(buffer, cci);
                int nameLength = SMBUtil.readInt2(buffer, cci + 2);
                cci += 4;
                // Read DataOffset at cci+2 — the two bytes before it are reserved.
                int dataOffset = SMBUtil.readInt2(buffer, cci + 2);
                cci += 4;
                int dataLength = SMBUtil.readInt4(buffer, cci);
                cci += 4;
                byte[] nameBytes = new byte[nameLength];
                System.arraycopy(buffer, createContextStart + nameOffset, nameBytes, 0, nameBytes.length);
                // Advance past the name, whichever end is further.
                cci = Math.max(cci, createContextStart + nameOffset + nameLength);
                CreateContextResponse cc = createContext(nameBytes);
                if (cc != null) {
                    // Known context type: decode its payload.
                    cc.decode(buffer, createContextStart + dataOffset, dataLength);
                    contexts.add(cc);
                }
                // Advance past the data, whichever end is further.
                cci = Math.max(cci, createContextStart + dataOffset + dataLength);
                if (next > 0) {
                    createContextStart += next;
                }
                bufferIndex = Math.max(bufferIndex, cci);
            } while (next > 0);
            this.createContexts = contexts.toArray(new CreateContextResponse[0]);
        }
        if (log.isDebugEnabled()) {
            log.debug("Opened " + this.fileName + ": " + Hexdump.toHexString(this.fileId));
        }
        // Number of bytes consumed from the buffer.
        return bufferIndex - start;
    }
}
|
public class RuleBasedTimeZone {
    /**
     * {@inheritDoc}
     *
     * <p>Finds the latest transition at (if {@code inclusive}) or strictly before
     * {@code base}, searching the historic transition list and, beyond its end,
     * the final (recurring) rules. Transitions that change only the zone name —
     * i.e. neither raw offset nor DST savings change — are skipped by recursing
     * to the previous transition.</p>
     */
    @Override
    public TimeZoneTransition getPreviousTransition(long base, boolean inclusive) {
        complete(); // ensure the transition data is fully built before searching
        if (historicTransitions == null) {
            return null;
        }
        TimeZoneTransition result;
        TimeZoneTransition tzt = historicTransitions.get(0);
        long tt = tzt.getTime();
        if (inclusive && tt == base) {
            // Exact hit on the very first transition.
            result = tzt;
        } else if (tt >= base) {
            // base lies before the first known transition — nothing to return.
            return null;
        } else {
            int idx = historicTransitions.size() - 1;
            tzt = historicTransitions.get(idx);
            tt = tzt.getTime();
            if (inclusive && tt == base) {
                result = tzt;
            } else if (tt < base) {
                // base is after the last historic transition.
                if (finalRules != null) {
                    // Find a transition time with finalRules
                    Date start0 = finalRules[0].getPreviousStart(base, finalRules[1].getRawOffset(), finalRules[1].getDSTSavings(), inclusive);
                    Date start1 = finalRules[1].getPreviousStart(base, finalRules[0].getRawOffset(), finalRules[0].getDSTSavings(), inclusive);
                    // The later of the two rule starts is the most recent transition;
                    // 'from' is the other rule, 'to' is the rule whose start was used.
                    if (start1.before(start0)) {
                        tzt = new TimeZoneTransition(start0.getTime(), finalRules[1], finalRules[0]);
                    } else {
                        tzt = new TimeZoneTransition(start1.getTime(), finalRules[0], finalRules[1]);
                    }
                }
                result = tzt;
            } else {
                // Find a transition within the historic transitions:
                // walk backwards until one at/before base is found.
                idx--;
                while (idx >= 0) {
                    tzt = historicTransitions.get(idx);
                    tt = tzt.getTime();
                    if (tt < base || (inclusive && tt == base)) {
                        break;
                    }
                    idx--;
                }
                result = tzt;
            }
        }
        // For now, this implementation ignores transitions with only zone name changes.
        TimeZoneRule from = result.getFrom();
        TimeZoneRule to = result.getTo();
        if (from.getRawOffset() == to.getRawOffset() && from.getDSTSavings() == to.getDSTSavings()) {
            // No offset changes. Try the previous one (always exclusive to avoid
            // returning the same name-only transition again).
            result = getPreviousTransition(result.getTime(), false /* always exclusive */);
        }
        return result;
    }
}
|
public class UsersApi { /** * Delete User Application Properties
* Deletes a user & # 39 ; s application properties
* @ param userId User Id ( required )
* @ param aid Application ID ( optional )
* @ return ApiResponse & lt ; PropertiesEnvelope & gt ;
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiResponse < PropertiesEnvelope > deleteUserPropertiesWithHttpInfo ( String userId , String aid ) throws ApiException { } }
|
com . squareup . okhttp . Call call = deleteUserPropertiesValidateBeforeCall ( userId , aid , null , null ) ; Type localVarReturnType = new TypeToken < PropertiesEnvelope > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
|
public class OjbTagsHandler { /** * Returns the current definition on the indicated level .
* @ param level The level
* @ return The definition */
private DefBase getDefForLevel ( String level ) { } }
|
if ( LEVEL_CLASS . equals ( level ) ) { return _curClassDef ; } else if ( LEVEL_FIELD . equals ( level ) ) { return _curFieldDef ; } else if ( LEVEL_REFERENCE . equals ( level ) ) { return _curReferenceDef ; } else if ( LEVEL_COLLECTION . equals ( level ) ) { return _curCollectionDef ; } else if ( LEVEL_OBJECT_CACHE . equals ( level ) ) { return _curObjectCacheDef ; } else if ( LEVEL_INDEX_DESC . equals ( level ) ) { return _curIndexDescriptorDef ; } else if ( LEVEL_TABLE . equals ( level ) ) { return _curTableDef ; } else if ( LEVEL_COLUMN . equals ( level ) ) { return _curColumnDef ; } else if ( LEVEL_FOREIGNKEY . equals ( level ) ) { return _curForeignkeyDef ; } else if ( LEVEL_INDEX . equals ( level ) ) { return _curIndexDef ; } else if ( LEVEL_PROCEDURE . equals ( level ) ) { return _curProcedureDef ; } else if ( LEVEL_PROCEDURE_ARGUMENT . equals ( level ) ) { return _curProcedureArgumentDef ; } else { return null ; }
|
public class IdToObjectMap { /** * Returns an iterator with which to browse the values
* @ return Iterator */
public Iterator iterator ( ) { } }
|
if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "iterator" ) ; if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "iterator" ) ; return map . values ( ) . iterator ( ) ;
|
public class Configuration { /** * Get the column override
* @ param key schema and table
* @ param column column
* @ return overridden column */
public String getColumnOverride ( SchemaAndTable key , String column ) { } }
|
return nameMapping . getColumnOverride ( key , column ) . or ( column ) ;
|
public class DcosSpec {
    /**
     * Adds a new DCOS/Marathon label (key/value) to a deployed service by
     * downloading the service's app definition over SSH, splicing the label into
     * the JSON and re-submitting it with {@code dcos marathon app update}.
     *
     * <p>NOTE(review): this method builds shell commands and JSON by string
     * concatenation; it assumes {@code key}, {@code value} and {@code service}
     * contain no shell/JSON metacharacters — TODO confirm with callers.</p>
     *
     * @param key     label key to add
     * @param value   label value to add
     * @param service Marathon service id whose definition is modified
     * @throws Exception if any remote command or JSON manipulation fails
     */
    @Then("^I add a new DCOS label with key '(.+?)' and value '(.+?)' to the service '(.+?)'?$")
    public void sendAppendRequest(String key, String value, String service) throws Exception {
        // Dump the current app definition to /dcos/<service>.json on the remote host.
        commonspec.runCommandAndGetResult("touch " + service + ".json && dcos marathon app show " + service + " > /dcos/" + service + ".json");
        // Read the file back; the SSH connection holds the command output.
        commonspec.runCommandAndGetResult("cat /dcos/" + service + ".json");
        String configFile = commonspec.getRemoteSSHConnection().getResult();
        // Extract the existing labels object and strip it from the definition.
        String myValue = commonspec.getJSONPathString(configFile, ".labels", "0");
        String myJson = commonspec.updateMarathonJson(commonspec.removeJSONPathElement(configFile, "$.labels"));
        // Inject the new key/value pair as the first entry of the labels object.
        String newValue = myValue.replaceFirst("\\{", "{\"" + key + "\": \"" + value + "\", ");
        newValue = "\"labels\":" + newValue;
        // Re-attach the labels object at the front of the app definition.
        String myFinalJson = myJson.replaceFirst("\\{", "{" + newValue.replace("\\n", "\\\\n") + ",");
        if (myFinalJson.contains("uris")) {
            // "uris" is not accepted on update — presumably renamed to neutralize it; verify.
            String test = myFinalJson.replaceAll("\"uris\"", "\"none\"");
            commonspec.runCommandAndGetResult("echo '" + test + "' > /dcos/final" + service + ".json");
        } else {
            commonspec.runCommandAndGetResult("echo '" + myFinalJson + "' > /dcos/final" + service + ".json");
        }
        // Push the modified definition and record the command's exit status.
        commonspec.runCommandAndGetResult("dcos marathon app update " + service + " < /dcos/final" + service + ".json");
        commonspec.setCommandExitStatus(commonspec.getRemoteSSHConnection().getExitStatus());
    }
}
|
public class BNFHeadersImpl { /** * @ see com . ibm . wsspi . genericbnf . HeaderStorage # setLimitOfTokenSize ( int ) */
@ Override public void setLimitOfTokenSize ( int size ) { } }
|
if ( 0 >= size ) { throw new IllegalArgumentException ( "Invalid limit on token size: " + size ) ; } this . limitTokenSize = size ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Limit on token size now: " + this . limitTokenSize ) ; }
|
public class WebDriverTool { /** * Finds all elements . Uses the internal { @ link WebElementFinder } .
* @ param by
* the { @ link By } used to locate the elements
* @ return the list of elements */
@ Override public List < WebElement > findElements ( final By by ) { } }
|
return wef . by ( by ) . findAll ( ) ;
|
public class IndexingConfigurationImpl {
    /**
     * {@inheritDoc}
     *
     * <p>Parses the indexing configuration DOM: registers {@code index-rule}
     * elements per node type (including subtypes), collects {@code aggregate}
     * rules, binds per-property {@code analyzers}, and records {@code exclude}
     * rules.</p>
     */
    public void init(Element config, QueryHandlerContext context, NamespaceMappings nsMappings) throws Exception {
        ism = context.getItemStateManager();
        // Resolve names against the namespaces declared on the config element.
        NamespaceAccessor nsResolver = new AdditionalNamespaceResolver(getNamespaces(config));
        resolver = new LocationFactory(nsResolver);
        // new ParsingNameResolver(NameFactoryImpl.getInstance(), nsResolver);
        NodeTypeDataManager ntReg = context.getNodeTypeDataManager();
        // List<NodeTypeData> ntNames = ntReg.getAllNodeTypes();
        List<AggregateRuleImpl> idxAggregates = new ArrayList<AggregateRuleImpl>();
        NodeList indexingConfigs = config.getChildNodes();
        for (int i = 0; i < indexingConfigs.getLength(); i++) {
            Node configNode = indexingConfigs.item(i);
            if (configNode.getNodeName().equals("index-rule")) {
                IndexingRule element = new IndexingRule(configNode);
                // Register the rule under its node type and all of its subtypes.
                LOG.debug("Found rule '{}' for NodeType '{}'", element, element.getNodeTypeName());
                Set<InternalQName> subs = ntReg.getSubtypes(element.getNodeTypeName());
                subs.add(element.getNodeTypeName());
                for (InternalQName subTypeName : subs) {
                    List<IndexingRule> perNtConfig = configElements.get(subTypeName);
                    if (perNtConfig == null) {
                        perNtConfig = new ArrayList<IndexingRule>();
                        configElements.put(subTypeName, perNtConfig);
                    }
                    LOG.debug("Registering it for name '{}'", subTypeName);
                    perNtConfig.add(new IndexingRule(element, subTypeName));
                }
            } else if (configNode.getNodeName().equals("aggregate")) {
                idxAggregates.add(new AggregateRuleImpl(configNode, resolver, ism));
            } else if (configNode.getNodeName().equals("analyzers")) {
                NodeList childNodes = configNode.getChildNodes();
                for (int j = 0; j < childNodes.getLength(); j++) {
                    Node analyzerNode = childNodes.item(j);
                    if (analyzerNode.getNodeName().equals("analyzer")) {
                        String analyzerClassName = analyzerNode.getAttributes().getNamedItem("class").getNodeValue();
                        try {
                            Class<?> clazz = ClassLoading.forName(analyzerClassName, this);
                            if (clazz == JcrStandartAnalyzer.class) {
                                // The default analyzer must not be configured per property.
                                LOG.warn("Not allowed to configure " + JcrStandartAnalyzer.class.getName()
                                        + " for a property. " + "Using default analyzer for that property.");
                            } else if (Analyzer.class.isAssignableFrom(clazz)) {
                                Analyzer analyzer = (Analyzer) clazz.newInstance();
                                NodeList propertyChildNodes = analyzerNode.getChildNodes();
                                for (int k = 0; k < propertyChildNodes.getLength(); k++) {
                                    Node propertyNode = propertyChildNodes.item(k);
                                    if (propertyNode.getNodeName().equals("property")) {
                                        // Resolve the JCR property name this analyzer applies to.
                                        InternalQName propName = resolver.parseJCRName(getTextContent(propertyNode)).getInternalName();
                                        String fieldName = nsMappings.translateName(propName);
                                        // Set the analyzer for the fulltext property field name
                                        // by inserting the fulltext prefix after the namespace prefix.
                                        int idx = fieldName.indexOf(':');
                                        fieldName = fieldName.substring(0, idx + 1) + FieldNames.FULLTEXT_PREFIX + fieldName.substring(idx + 1);
                                        Object prevAnalyzer = analyzers.put(fieldName, analyzer);
                                        if (prevAnalyzer != null) {
                                            // Last configuration wins when duplicated.
                                            LOG.warn("Property " + propName.getName()
                                                    + " has been configured for multiple analyzers. "
                                                    + " Last configured analyzer is used");
                                        }
                                    }
                                }
                            } else {
                                LOG.warn("org.apache.lucene.analysis.Analyzer is not a superclass of "
                                        + analyzerClassName + ". Ignoring this configure analyzer");
                            }
                        } catch (ClassNotFoundException e) {
                            LOG.warn("Analyzer class not found: " + analyzerClassName, e);
                        }
                    }
                }
            } else if (configNode.getNodeName().equals("exclude")) {
                excludingRules.add(new ExcludingRuleImpl(configNode, ntReg, resolver));
            }
        }
        aggregateRules = idxAggregates.toArray(new AggregateRule[idxAggregates.size()]);
    }
}
|
public class Widget { /** * Called when this { @ link Widget } has had line - of - sight focus for more than
* { @ link # getLongFocusTimeout ( ) } milliseconds . Notifies all
* { @ linkplain OnFocusListener # onLongFocus ( ) listeners } ; if none of the
* listeners has completely handled the event , { @ link # onLongFocus ( ) } is
* called . */
private void doOnLongFocus ( ) { } }
|
final List < OnFocusListener > focusListeners ; synchronized ( mFocusListeners ) { focusListeners = new ArrayList < > ( mFocusListeners ) ; } for ( OnFocusListener listener : focusListeners ) { if ( listener . onLongFocus ( this ) ) { return ; } } onLongFocus ( ) ; final boolean inFollowFocusGroup = isInFollowFocusGroup ( ) ; for ( Widget child : getChildren ( ) ) { if ( child . mFocusEnabled && ( mChildrenFollowFocus || child . mFollowParentFocus || inFollowFocusGroup ) ) { child . doOnLongFocus ( ) ; } }
|
public class InviteUsersRequest { /** * The user email addresses to which to send the invite .
* @ param userEmailList
* The user email addresses to which to send the invite . */
public void setUserEmailList ( java . util . Collection < String > userEmailList ) { } }
|
if ( userEmailList == null ) { this . userEmailList = null ; return ; } this . userEmailList = new java . util . ArrayList < String > ( userEmailList ) ;
|
public class FilterImpl { /** * ( non - Javadoc )
* @ see com . impetus . kundera . classreading . Filter # accepts ( java . lang . String ) */
@ Override public final boolean accepts ( String filename ) { } }
|
if ( filename . endsWith ( ".class" ) ) { if ( filename . startsWith ( "/" ) ) { filename = filename . substring ( 1 ) ; } if ( ! ignoreScan ( filename . replace ( '/' , '.' ) ) ) { return true ; } } return false ;
|
public class LoggingConfiguration {
    /**
     * Kick-starts the blitz4j implementation: installs the blitz4j log4j
     * hierarchy, loads log4j properties (from a system-property file or the
     * classpath), merges the overriding properties, converts configured
     * appenders to async, and configures log4j.
     *
     * @param props - The overriding <em>log4j</em> properties, if any.
     */
    public void configure(Properties props) {
        // Reset any state from a previous configure() run.
        this.refreshCount.set(0);
        this.overrideProps.clear();
        this.originalAsyncAppenderNameMap.clear();
        // First try to load the log4j configuration file from the classpath.
        String log4jConfigurationFile = System.getProperty(PROP_LOG4J_CONFIGURATION);
        NFHierarchy nfHierarchy = null;
        // Make log4j use blitz4j implementations (only if not already installed).
        if ((!NFHierarchy.class.equals(LogManager.getLoggerRepository().getClass()))) {
            nfHierarchy = new NFHierarchy(new NFRootLogger(org.apache.log4j.Level.INFO));
            org.apache.log4j.LogManager.setRepositorySelector(new NFRepositorySelector(nfHierarchy), guard);
        }
        String log4jLoggerFactory = System.getProperty(PROP_LOG4J_LOGGER_FACTORY);
        if (log4jLoggerFactory != null) {
            // A custom logger factory was requested via system property.
            this.initialProps.setProperty(PROP_LOG4J_LOGGER_FACTORY, log4jLoggerFactory);
            if (nfHierarchy != null) {
                try {
                    LoggerFactory loggerFactory = (LoggerFactory) Class.forName(log4jLoggerFactory).newInstance();
                    nfHierarchy.setLoggerFactory(loggerFactory);
                } catch (Exception e) {
                    // Best effort: fall back to the default factory on failure.
                    System.err.println("Cannot set the logger factory. Hence reverting to default.");
                    e.printStackTrace();
                }
            }
        } else {
            this.initialProps.setProperty(PROP_LOG4J_LOGGER_FACTORY, BLITZ_LOGGER_FACTORY);
        }
        if (log4jConfigurationFile != null) {
            loadLog4jConfigurationFile(log4jConfigurationFile);
            // First configure without async so that we can capture the output
            // of dependent libraries.
            clearAsyncAppenderList();
            PropertyConfigurator.configure(this.initialProps);
        }
        this.blitz4jConfig = new DefaultBlitz4jConfig(props);
        if ((log4jConfigurationFile == null) && (blitz4jConfig.shouldLoadLog4jPropertiesFromClassPath())) {
            // No explicit config file: try log4j.properties from the classpath.
            try {
                URL url = Loader.getResource(LOG4J_PROPERTIES);
                if (url != null) {
                    try (InputStream in = url.openStream()) {
                        this.initialProps.load(in);
                    }
                }
            } catch (Exception t) {
                System.err.println("Error loading properties from " + LOG4J_PROPERTIES);
            }
        }
        // Overlay the caller-supplied overriding properties.
        Enumeration enumeration = props.propertyNames();
        while (enumeration.hasMoreElements()) {
            String key = (String) enumeration.nextElement();
            String propertyValue = props.getProperty(key);
            this.initialProps.setProperty(key, propertyValue);
        }
        // Rebuild the config from the merged property set.
        this.blitz4jConfig = new DefaultBlitz4jConfig(this.initialProps);
        String[] asyncAppenderArray = blitz4jConfig.getAsyncAppenders();
        if (asyncAppenderArray == null) {
            return;
        }
        for (int i = 0; i < asyncAppenderArray.length; i++) {
            String oneAppenderName = asyncAppenderArray[i];
            // NOTE(review): index 0 is deliberately skipped here — presumably a
            // sentinel/placeholder entry in the async appender list; confirm.
            if ((i == 0) || (oneAppenderName == null)) {
                continue;
            }
            oneAppenderName = oneAppenderName.trim();
            String oneAsyncAppenderName = oneAppenderName + ASYNC_APPENDERNAME_SUFFIX;
            originalAsyncAppenderNameMap.put(oneAppenderName, oneAsyncAppenderName);
        }
        try {
            convertConfiguredAppendersToAsync(this.initialProps);
        } catch (Exception e) {
            throw new RuntimeException("Could not configure async appenders ", e);
        }
        // Yes, a second init is required as properties changed during async appender conversion.
        this.blitz4jConfig = new DefaultBlitz4jConfig(this.initialProps);
        clearAsyncAppenderList();
        PropertyConfigurator.configure(this.initialProps);
        closeNonexistingAsyncAppenders();
        this.logger = org.slf4j.LoggerFactory.getLogger(LoggingConfiguration.class);
        // React to dynamic configuration changes.
        ConfigurationManager.getConfigInstance().addConfigurationListener(new ExpandedConfigurationListenerAdapter(this));
    }
}
|
public class responderhtmlpage { /** * Use this API to fetch all the responderhtmlpage resources that are configured on netscaler . */
public static responderhtmlpage get ( nitro_service service ) throws Exception { } }
|
responderhtmlpage obj = new responderhtmlpage ( ) ; responderhtmlpage [ ] response = ( responderhtmlpage [ ] ) obj . get_resources ( service ) ; return response [ 0 ] ;
|
public class Lockable { /** * Write - lock key and delete ; blocking .
* Throws IAE if the key is already locked . */
public static void delete ( Key key ) { } }
|
Value val = DKV . get ( key ) ; if ( val == null ) return ; ( ( Lockable ) val . get ( ) ) . delete ( ) ;
|
public class RTMPProtocolEncoder {
    /**
     * Encodes an RTMP packet into the wire format, splitting the message body
     * into chunks of the connection's current write chunk size, each preceded
     * by an (RTMP) chunk header.
     *
     * @param packet RTMP packet
     * @return Encoded data, or null when the message was dropped or produced no data
     */
    public IoBuffer encodePacket(Packet packet) {
        IoBuffer out = null;
        Header header = packet.getHeader();
        int channelId = header.getChannelId();
        // log.trace("Channel id: {}", channelId);
        IRTMPEvent message = packet.getMessage();
        if (message instanceof ChunkSize) {
            // A ChunkSize message changes the write chunk size for this connection.
            ChunkSize chunkSizeMsg = (ChunkSize) message;
            ((RTMPConnection) Red5.getConnectionLocal()).getState().setWriteChunkSize(chunkSizeMsg.getSize());
        }
        // Normally the message is expected not to be dropped.
        if (!dropMessage(channelId, message)) {
            // log.trace("Header time: {} message timestamp: {}", header.getTimer(), message.getTimestamp());
            IoBuffer data = encodeMessage(header, message);
            if (data != null) {
                RTMP rtmp = ((RTMPConnection) Red5.getConnectionLocal()).getState();
                // Set last write packet.
                rtmp.setLastWritePacket(channelId, packet);
                // Ensure we're at the beginning of the encoded body.
                if (data.position() != 0) {
                    data.flip();
                } else {
                    data.rewind();
                }
                // Length of the data to be chunked.
                int dataLen = data.limit();
                header.setSize(dataLen);
                // if (log.isTraceEnabled()) {
                //     log.trace("Message: {}", data);
                // }
                // Chunk size for writing.
                int chunkSize = rtmp.getWriteChunkSize();
                // Number of chunks to write.
                int numChunks = (int) Math.ceil(dataLen / (float) chunkSize);
                // Get the last header written on this channel (for header compression).
                Header lastHeader = rtmp.getLastWriteHeader(channelId);
                if (log.isTraceEnabled()) {
                    log.trace("Channel id: {} chunkSize: {}", channelId, chunkSize);
                }
                // Attempt to properly guess the size of the buffer we'll need.
                int bufSize = dataLen + 18 + (numChunks * 2);
                // log.trace("Allocated buffer size: {}", bufSize);
                out = IoBuffer.allocate(bufSize, false);
                out.setAutoExpand(true);
                do {
                    // Encode the chunk header (compressed against lastHeader).
                    encodeHeader(header, lastHeader, out);
                    // Write one chunk of at most chunkSize bytes.
                    byte[] buf = new byte[Math.min(chunkSize, data.remaining())];
                    data.get(buf);
                    // log.trace("Buffer: {}", Hex.encodeHexString(buf));
                    out.put(buf);
                    // Move header over to last header for the next iteration.
                    lastHeader = header.clone();
                } while (data.hasRemaining());
                // Collapse the time stamps on the last header after encode is complete.
                lastHeader.setTimerBase(lastHeader.getTimer());
                // Clear the delta.
                lastHeader.setTimerDelta(0);
                // Set last write header.
                rtmp.setLastWriteHeader(channelId, lastHeader);
                data.free();
                out.flip();
                data = null;
            }
        }
        message.release();
        return out;
    }
}
|
public class Options { /** * Return the value of the named property .
* @ param name String property name
* @ return int value of property
* @ throws OptionsException */
@ Override public int getIntProperty ( final String name ) throws OptionsException { } }
|
String val = getStringProperty ( name ) ; try { return Integer . valueOf ( val ) . intValue ( ) ; } catch ( Throwable t ) { throw new OptionsException ( "org.bedework.calenv.bad.option.value" ) ; }
|
public class Skip32 {
    /**
     * Applies the SKIP32 function on the provided value stored in buf and
     * modifies it in place. This is a low-level function used by the encrypt and
     * decrypt functions.
     *
     * @param key the cipher key (bytes consumed by the round function {@code g})
     * @param buf four ints, each holding one byte of the 32-bit value; overwritten
     *            with the transformed value
     * @param encrypt true to run the rounds forward (encrypt), false to run them
     *                in reverse (decrypt)
     */
    public static void skip32(byte[] key, int[] buf, boolean encrypt) {
        int k; /* round number */
        int i; /* round counter */
        int kstep;
        int wl, wr;
        /* sort out direction: encryption counts rounds up from 0,
           decryption counts down from 23 */
        if (encrypt) {
            kstep = 1;
            k = 0;
        } else {
            kstep = -1;
            k = 23;
        }
        /* pack the four bytes into two 16-bit halves */
        wl = (buf[0] << 8) + buf[1];
        wr = (buf[2] << 8) + buf[3];
        /* 24 Feistel rounds, doubled up: each loop iteration performs two
           rounds, alternating which half is modified */
        for (i = 0; i < 24 / 2; ++i) {
            wr ^= g(key, k, wl) ^ k;
            k += kstep;
            wl ^= g(key, k, wr) ^ k;
            k += kstep;
        }
        /* implicitly swap halves while unpacking back into bytes */
        buf[0] = (wr >> 8);
        buf[1] = (wr & 0xFF);
        buf[2] = (wl >> 8);
        buf[3] = (wl & 0xFF);
    }
}
|
public class ChemObjectIO { /** * { @ inheritDoc } */
@ Override public void addSettings ( Collection < IOSetting > settings ) { } }
|
for ( IOSetting setting : settings ) { if ( hasSetting ( setting . getName ( ) ) ) { try { getSetting ( setting . getName ( ) ) . setSetting ( setting . getSetting ( ) ) ; } catch ( CDKException ex ) { // setting value was invalid ( ignore as we already have a value for this setting
// and we can ' t throw CDKException as IChemObject is in interfaces module )
} } else { addSetting ( setting ) ; } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.