signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class OutputContext { /** * Get an OutputBuffer containing a subset of the current one * @ param limit How many bytes should be available * @ return a new OutputContext */ public OutputContext getSlice ( int limit ) { } }
List < ByteBuffer > newBufs = new LinkedList < ByteBuffer > ( ) ; ByteBuffer buf = ByteBuffer . allocate ( limit ) ; Iterator < ByteBuffer > iter = buffers . iterator ( ) ; while ( iter . hasNext ( ) && buf . position ( ) < buf . limit ( ) ) { ByteBuffer cur = iter . next ( ) ; int diff = buf . limit ( ) - buf . position ( ) ; if ( diff > cur . limit ( ) ) { buf . put ( cur ) ; iter . remove ( ) ; } else { ByteBuffer slice = cur . duplicate ( ) ; slice . limit ( diff ) ; buf . put ( slice ) ; } } return new OutputContext ( newBufs ) ;
public class ConcurrentCache { /** * Returns a set of entries in this cache . * @ return cache entries */ public Set < Entry < K , V > > entrySet ( ) { } }
TreeSet < Entry < K , V > > set = new TreeSet < Entry < K , V > > ( ) ; synchronized ( this ) { for ( K key : keySet ( ) ) { if ( containsKey ( key ) ) { get ( key ) ; set . add ( getEntry ( key ) ) ; } } return set ; }
public class CachingPolicy { /** * If the request is cached an { @ link IConnectorInterceptor } is set in order to prevent the back - end connection to be established . * Otherwise an empty { @ link CachedResponse } will be added to the context , this will be used to cache the response once it has been * received from the back - end API * @ see io . apiman . gateway . engine . policies . AbstractMappedPolicy # doApply ( io . apiman . gateway . engine . beans . ApiRequest , io . apiman . gateway . engine . policy . IPolicyContext , java . lang . Object , io . apiman . gateway . engine . policy . IPolicyChain ) */ @ Override protected void doApply ( final ApiRequest request , final IPolicyContext context , final CachingConfig config , final IPolicyChain < ApiRequest > chain ) { } }
if ( config . getTtl ( ) > 0 ) { // Check to see if there is a cache entry for this request . If so , we need to // short - circuit the connector factory by providing a connector interceptor String cacheId = buildCacheID ( request ) ; context . setAttribute ( CACHE_ID_ATTR , cacheId ) ; ICacheStoreComponent cache = context . getComponent ( ICacheStoreComponent . class ) ; cache . getBinary ( cacheId , ApiResponse . class , new IAsyncResultHandler < ISignalReadStream < ApiResponse > > ( ) { @ Override public void handle ( IAsyncResult < ISignalReadStream < ApiResponse > > result ) { if ( result . isError ( ) ) { chain . throwError ( result . getError ( ) ) ; } else { ISignalReadStream < ApiResponse > cacheEntry = result . getResult ( ) ; if ( cacheEntry != null ) { context . setConnectorInterceptor ( new CacheConnectorInterceptor ( cacheEntry ) ) ; context . setAttribute ( SHOULD_CACHE_ATTR , Boolean . FALSE ) ; context . setAttribute ( CACHED_RESPONSE , cacheEntry . getHead ( ) ) ; } chain . doApply ( request ) ; } } } ) ; } else { context . setAttribute ( SHOULD_CACHE_ATTR , Boolean . FALSE ) ; chain . doApply ( request ) ; }
public class CharStream { /** * Zip together the " a " , " b " and " c " arrays until one of them runs out of values . * Each triple of values is combined into a single value using the supplied zipFunction function . * @ param a * @ param b * @ return */ public static CharStream zip ( final char [ ] a , final char [ ] b , final char [ ] c , final CharTriFunction < Character > zipFunction ) { } }
return Stream . zip ( a , b , c , zipFunction ) . mapToChar ( ToCharFunction . UNBOX ) ;
public class AclXmlFactory { /** * Returns an XML fragment representing the specified Grantee . * @ param grantee * The grantee to convert to an XML representation that can be * sent to Amazon S3 as part of a request . * @ param xml * The XmlWriter to which to concatenate this node to . * @ return The given XmlWriter containing the specified grantee . * @ throws SdkClientException * If the specified grantee type isn ' t recognized . */ protected XmlWriter convertToXml ( Grantee grantee , XmlWriter xml ) throws SdkClientException { } }
if ( grantee instanceof CanonicalGrantee ) { return convertToXml ( ( CanonicalGrantee ) grantee , xml ) ; } else if ( grantee instanceof EmailAddressGrantee ) { return convertToXml ( ( EmailAddressGrantee ) grantee , xml ) ; } else if ( grantee instanceof GroupGrantee ) { return convertToXml ( ( GroupGrantee ) grantee , xml ) ; } else { throw new SdkClientException ( "Unknown Grantee type: " + grantee . getClass ( ) . getName ( ) ) ; }
public class JaxWsHttpServletRequestAdapter { /** * ( non - Javadoc ) * @ see javax . servlet . ServletRequest # getDispatcherType ( ) */ @ Override public DispatcherType getDispatcherType ( ) { } }
try { collaborator . preInvoke ( componentMetaData ) ; return request . getDispatcherType ( ) ; } finally { collaborator . postInvoke ( ) ; }
public class Transformations { /** * rSum transformation */ public float rSum ( float [ ] y , float [ ] w ) { } }
float tmp1 = ( float ) 0.0 , tmp2 = ( float ) 0.0 ; for ( int i = 0 ; i < y . length ; i ++ ) { tmp1 += y [ i ] * w [ i ] ; tmp2 += w [ i ] ; } return correctTo01 ( tmp1 / tmp2 ) ;
public class CharRange { /** * < p > Are all the characters of the passed in range contained in * this range . < / p > * @ param range the range to check against * @ return { @ code true } if this range entirely contains the input range * @ throws IllegalArgumentException if { @ code null } input */ public boolean contains ( final CharRange range ) { } }
Validate . isTrue ( range != null , "The Range must not be null" ) ; if ( negated ) { if ( range . negated ) { return start >= range . start && end <= range . end ; } return range . end < start || range . start > end ; } if ( range . negated ) { return start == 0 && end == Character . MAX_VALUE ; } return start <= range . start && end >= range . end ;
public class Counters { /** * Multiplies each value in target by the given multiplier , in place . * @ param target * The values in this Counter will be changed throught by the * multiplier * @ param multiplier * The number by which to change each number in the Counter */ public static < E > Counter < E > multiplyInPlace ( Counter < E > target , double multiplier ) { } }
for ( Entry < E , Double > entry : target . entrySet ( ) ) { target . setCount ( entry . getKey ( ) , entry . getValue ( ) * multiplier ) ; } return target ;
public class ObjIterator { /** * Lazy evaluation . * @ param arraySupplier * @ return */ public static < T > ObjIterator < T > oF ( final Supplier < T [ ] > arraySupplier ) { } }
N . checkArgNotNull ( arraySupplier , "arraySupplier" ) ; return new ObjIterator < T > ( ) { private T [ ] aar = null ; private int len = 0 ; private int cur = 0 ; private boolean isInitialized = false ; @ Override public boolean hasNext ( ) { if ( isInitialized == false ) { init ( ) ; } return cur < len ; } @ Override public T next ( ) { if ( isInitialized == false ) { init ( ) ; } if ( cur >= len ) { throw new NoSuchElementException ( ) ; } return aar [ cur ++ ] ; } private void init ( ) { if ( isInitialized == false ) { isInitialized = true ; aar = arraySupplier . get ( ) ; len = N . len ( aar ) ; } } } ;
public class FieldInfo { /** * Handle { @ link Repeatable } annotations . * @ param allRepeatableAnnotationNames * the names of all repeatable annotations */ void handleRepeatableAnnotations ( final Set < String > allRepeatableAnnotationNames ) { } }
if ( annotationInfo != null ) { annotationInfo . handleRepeatableAnnotations ( allRepeatableAnnotationNames , getClassInfo ( ) , RelType . FIELD_ANNOTATIONS , RelType . CLASSES_WITH_FIELD_ANNOTATION ) ; }
public class KeywordQueryFactory { /** * - - - - - private methods - - - - - */ protected String escape ( final Object src ) { } }
final StringBuilder output = new StringBuilder ( ) ; final String input = src . toString ( ) ; for ( int i = 0 ; i < input . length ( ) ; i ++ ) { final char c = input . charAt ( i ) ; final String prefix = SPECIAL_CHARS . get ( c ) ; if ( prefix != null ) { output . append ( prefix ) ; } output . append ( c ) ; } return output . toString ( ) ;
public class MlBaseState { /** * { @ inheritDoc } */ @ Override public List < T > multiGet ( List < List < Object > > keys ) { } }
// keysにはStateBaseNameの名称のみが指定される 。 そのため 、 1要素目を用いればいい 。 String baseKey = ( String ) keys . get ( 0 ) . get ( 0 ) ; // 前回のクラスタ実行結果を取得する 。 存在しない場合は空値として扱う 。 List < T > dataModels = new ArrayList < > ( ) ; T dataModel = null ; Long previousTxId = null ; if ( this . previousSaveTxId == null ) { // クエリによってtxIdが存在しない状態で本メソッドが呼ばれる可能性があるため 、 nullの場合は補正を行う 。 if ( this . txId == null ) { previousTxId = 0L ; } else { previousTxId = this . txId - 1 ; } } else { previousTxId = this . previousSaveTxId ; } try { dataModel = getState ( baseKey , previousTxId , this . partitionIndex , true ) ; } catch ( IOException ex ) { logger . warn ( "State get failed. BaseKey=" + baseKey + " ,txId=" + previousTxId + " ,partitionIndex=" + this . partitionIndex , ex ) ; } // 取得できなかった場合は空リストを返す if ( dataModel == null ) { return dataModels ; } // 前回実行結果が存在し 、 前回の状態マージから一定時刻が経過していた場合 、 他のパーティションとのマージ処理を実行 if ( isExecuteMerge ( ) == true ) { for ( int nowIndex = 0 ; nowIndex < this . numPartitions ; nowIndex ++ ) { // 自分と同じインデックスの場合は省略 if ( nowIndex == this . partitionIndex ) { continue ; } T otherDataSet = null ; try { otherDataSet = getState ( baseKey , this . previousSaveTxId , nowIndex , false ) ; } catch ( IOException ex ) { logger . warn ( "MergeTargetState get failed. BaseKey=" + baseKey + " ,txId=" + previousTxId + " ,partitionIndex=" + nowIndex , ex ) ; } // 前回の他パーティションの結果が存在した場合 、 順次マージを行う if ( otherDataSet != null ) { dataModel = mergeState ( dataModel , otherDataSet , this . mergeConfig ) ; // マージ実行メッセージを出力 if ( logger . isDebugEnabled ( ) == true ) { logger . debug ( "Merge Executed. PartitionIndex=" + this . partitionIndex + ", MergeTransactionId=" + previousTxId ) ; } } } this . previousMergeTime = getCurrentTime ( ) ; } dataModels . add ( dataModel ) ; return dataModels ;
public class ServerProvisioningFeaturePack { /** * Creates a provisioning config file for each { @ link org . wildfly . build . common . model . ConfigFile } provided . * @ param featurePackFile * @ param configFiles * @ param configOverride * @ param configFileOverrides * @ return */ private static List < ConfigFile > createConfigFiles ( File featurePackFile , List < org . wildfly . build . common . model . ConfigFile > configFiles , ConfigOverride configOverride , Map < String , ConfigFileOverride > configFileOverrides ) { } }
final List < ConfigFile > result = new ArrayList < > ( ) ; if ( configOverride != null ) { if ( configFileOverrides != null && ! configFileOverrides . isEmpty ( ) ) { for ( org . wildfly . build . common . model . ConfigFile featurePackConfigFile : configFiles ) { ConfigFileOverride configFileOverride = configFileOverrides . get ( featurePackConfigFile . getOutputFile ( ) ) ; if ( configFileOverride != null ) { result . add ( new ConfigFile ( featurePackFile , featurePackConfigFile , configFileOverride ) ) ; } } } } else { for ( org . wildfly . build . common . model . ConfigFile featurePackConfigFile : configFiles ) { result . add ( new ConfigFile ( featurePackFile , featurePackConfigFile , null ) ) ; } } return result ;
public class CmsMove { /** * Returns the current name of the resource without path information . < p > * This is used to preset the input text field with the current resource name for single resource operations . < p > * @ return the current name of the resource without path information */ public String getCurrentResourceName ( ) { } }
if ( isMultiOperation ( ) ) { return "" ; } String resourceName = CmsResource . getName ( getParamResource ( ) ) ; if ( resourceName . endsWith ( "/" ) ) { resourceName = resourceName . substring ( 0 , resourceName . length ( ) - 1 ) ; } return resourceName ;
public class RemoteConsumerTransmit { /** * / * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . runtime . SIMPDeliveryStreamSetTransmitControllable # forceFlushAtSource ( ) */ public void forceFlushAtSource ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "forceFlushAtSource" ) ; _aoh . forceFlushAtSource ( _aoStream . getRemoteMEUuid ( ) , _aoStream . getGatheringTargetDestUuid ( ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "forceFlushAtSource" ) ;
public class AuditActionContextCouchDbRepository { /** * Find audit records since + localDate + . * @ param localDate Date to search from . * @ return Audit records from after + localDate + . */ @ View ( name = "by_when_action_was_performed" , map = "function(doc) { if(doc.whenActionWasPerformed) { emit(doc.whenActionWasPerformed, doc) } }" ) public List < CouchDbAuditActionContext > findAuditRecordsSince ( final LocalDate localDate ) { } }
return db . queryView ( createQuery ( "by_when_action_was_performed" ) . startKey ( localDate ) . includeDocs ( true ) , CouchDbAuditActionContext . class ) ;
public class ChangeObjects { /** * method to replace the MonomerNotation having the MonomerID with the new * MonomerID * @ param monomerNotation * given monomer notation * @ param existingMonomerID * existing monomer id * @ param newMonomerID * new monomer id * @ return MonomerNotation , if it had the old MonomerID , null otherwise * @ throws NotationException * if new monomer id is not valid * @ throws ChemistryException * if chemistry engine can not be initialized * @ throws CTKException * if it contains an invalid smiles * @ throws MonomerLoadingException * if monomers can not be loaded */ public final static MonomerNotation replaceMonomerNotation ( MonomerNotation monomerNotation , String existingMonomerID , String newMonomerID ) throws NotationException , ChemistryException , CTKException , MonomerLoadingException { } }
/* Nucleotide */ if ( monomerNotation instanceof MonomerNotationUnitRNA ) { List < String > result = generateIDForNucleotide ( ( ( MonomerNotationUnitRNA ) monomerNotation ) , existingMonomerID , newMonomerID ) ; if ( result . get ( 1 ) != null ) { MonomerNotationUnitRNA newObject = new MonomerNotationUnitRNA ( result . get ( 0 ) , monomerNotation . getType ( ) ) ; newObject . setCount ( monomerNotation . getCount ( ) ) ; if ( monomerNotation . isAnnotationTrue ( ) ) { newObject . setAnnotation ( monomerNotation . getAnnotation ( ) ) ; } return newObject ; } } else if ( monomerNotation instanceof MonomerNotationUnit ) { /* Simple MonomerNotationUnit */ if ( monomerNotation . getUnit ( ) . equals ( existingMonomerID ) ) { return produceMonomerNotationUnitWithOtherID ( monomerNotation , newMonomerID ) ; } } else if ( monomerNotation instanceof MonomerNotationList ) { /* MonomerNotationList */ monomerNotation = replaceMonomerNotationList ( ( ( MonomerNotationList ) monomerNotation ) , existingMonomerID , newMonomerID ) ; if ( monomerNotation != null ) { return monomerNotation ; } } else if ( monomerNotation instanceof MonomerNotationGroup ) { /* MonomerNotatationGroup */ monomerNotation = replaceMonomerNotationGroup ( ( ( MonomerNotationGroup ) monomerNotation ) , existingMonomerID , newMonomerID ) ; if ( monomerNotation != null ) { return monomerNotation ; } } else { throw new NotationException ( "Unknown MonomerNotation Type " + monomerNotation . getClass ( ) ) ; } return null ;
public class SigningJobMarshaller { /** * Marshall the given parameter object . */ public void marshall ( SigningJob signingJob , ProtocolMarshaller protocolMarshaller ) { } }
if ( signingJob == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( signingJob . getJobId ( ) , JOBID_BINDING ) ; protocolMarshaller . marshall ( signingJob . getSource ( ) , SOURCE_BINDING ) ; protocolMarshaller . marshall ( signingJob . getSignedObject ( ) , SIGNEDOBJECT_BINDING ) ; protocolMarshaller . marshall ( signingJob . getSigningMaterial ( ) , SIGNINGMATERIAL_BINDING ) ; protocolMarshaller . marshall ( signingJob . getCreatedAt ( ) , CREATEDAT_BINDING ) ; protocolMarshaller . marshall ( signingJob . getStatus ( ) , STATUS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Job { /** * Overrides from job properties . * @ see JobProperty # getJobOverrides */ public Collection < ? > getOverrides ( ) { } }
List < Object > r = new ArrayList < > ( ) ; for ( JobProperty < ? super JobT > p : properties ) r . addAll ( p . getJobOverrides ( ) ) ; return r ;
public class BaseX { /** * Updates { @ code min } and { @ code max } so that the range { @ code min . . max } * includes all values from { @ code chars } . * @ param chars * the list of characters to process . */ private void addMinMax ( String chars ) { } }
for ( int i = 0 ; i < chars . length ( ) ; i ++ ) { int c = chars . codePointAt ( i ) ; if ( min == - 1 || min > c ) { min = c ; } if ( max == - 1 || max < c ) { max = c ; } }
public class CPDefinitionLinkPersistenceImpl { /** * Returns the cp definition links before and after the current cp definition link in the ordered set where uuid = & # 63 ; . * @ param CPDefinitionLinkId the primary key of the current cp definition link * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next cp definition link * @ throws NoSuchCPDefinitionLinkException if a cp definition link with the primary key could not be found */ @ Override public CPDefinitionLink [ ] findByUuid_PrevAndNext ( long CPDefinitionLinkId , String uuid , OrderByComparator < CPDefinitionLink > orderByComparator ) throws NoSuchCPDefinitionLinkException { } }
CPDefinitionLink cpDefinitionLink = findByPrimaryKey ( CPDefinitionLinkId ) ; Session session = null ; try { session = openSession ( ) ; CPDefinitionLink [ ] array = new CPDefinitionLinkImpl [ 3 ] ; array [ 0 ] = getByUuid_PrevAndNext ( session , cpDefinitionLink , uuid , orderByComparator , true ) ; array [ 1 ] = cpDefinitionLink ; array [ 2 ] = getByUuid_PrevAndNext ( session , cpDefinitionLink , uuid , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class Boss { /** * Add thread to managed list * @ param worker the supervised worker */ public void manage ( SupervisedWorker worker ) { } }
if ( worker == null ) throw new IllegalArgumentException ( "worker required in Boss.manage" ) ; worker . setSupervisor ( this ) ; this . workers . add ( worker ) ;
public class CPDefinitionVirtualSettingWrapper { /** * Sets the localized terms of use contents of this cp definition virtual setting from the map of locales and localized terms of use contents , and sets the default locale . * @ param termsOfUseContentMap the locales and localized terms of use contents of this cp definition virtual setting * @ param defaultLocale the default locale */ @ Override public void setTermsOfUseContentMap ( Map < java . util . Locale , String > termsOfUseContentMap , java . util . Locale defaultLocale ) { } }
_cpDefinitionVirtualSetting . setTermsOfUseContentMap ( termsOfUseContentMap , defaultLocale ) ;
public class Base64 { /** * Encode a Byte array and return the encoded string . * @ param in A string to encode . * @ return The encoded byte array . */ public static String encode ( Byte [ ] in ) { } }
byte [ ] tmp = new byte [ in . length ] ; for ( int i = 0 ; i < tmp . length ; i ++ ) { tmp [ i ] = in [ i ] ; } return encode ( tmp ) ;
public class FSDirectory { /** * Updates namespace and diskspace consumed for all * directories until the parent directory of file represented by path . * @ param path path for the file . * @ param inodes inode array representation of the path * @ param nsDelta the delta change of namespace * @ param dsDelta the delta change of diskspace * @ throws QuotaExceededException if the new count violates any quota limit * @ throws FileNotFound if path does not exist . */ void updateSpaceConsumed ( String path , INode [ ] inodes , long nsDelta , long dsDelta ) throws QuotaExceededException , FileNotFoundException { } }
writeLock ( ) ; try { if ( inodes == null ) { inodes = rootDir . getExistingPathINodes ( path ) ; } int len = inodes . length ; if ( inodes [ len - 1 ] == null ) { throw new FileNotFoundException ( path + " does not exist under rootDir." ) ; } updateCount ( inodes , len - 1 , nsDelta , dsDelta , true ) ; } finally { writeUnlock ( ) ; }
public class HttpSessionAdapter { /** * Method adapt * This method adapts a protocol agnostic session into one that conforms to a protocol such as HTTP . * @ see com . ibm . wsspi . session . IProtocolAdapter # adapt ( com . ibm . wsspi . session . ISession , Integer ) */ public Object adapt ( ISession session ) { } }
Object adaptation = session . getAdaptation ( ) ; if ( null == adaptation ) { adaptation = new HttpSessionImpl ( session ) ; session . setAdaptation ( adaptation ) ; } return adaptation ;
public class DataService { /** * Method to retrieve records for the given list of query * @ param query * the query string * @ return query result * @ throws FMSException * throws FMSException */ public QueryResult executeQuery ( String query ) throws FMSException { } }
IntuitMessage intuitMessage = prepareQuery ( query ) ; // execute interceptors executeInterceptors ( intuitMessage ) ; QueryResult queryResult = null ; // Iterate the IntuitObjects list in QueryResponse and convert to < T > entity IntuitResponse intuitResponse = ( IntuitResponse ) intuitMessage . getResponseElements ( ) . getResponse ( ) ; if ( intuitResponse != null ) { QueryResponse queryResponse = intuitResponse . getQueryResponse ( ) ; if ( queryResponse != null ) { queryResult = getQueryResult ( queryResponse ) ; } } return queryResult ;
public class WSJobRepositoryImpl { /** * { @ inheritDoc } */ @ Override public WSJobInstance getJobInstanceFromExecution ( long executionId ) throws NoSuchJobExecutionException , JobSecurityException { } }
long instanceId = persistenceManagerService . getJobInstanceIdFromExecutionId ( authorizedExecutionRead ( executionId ) ) ; return persistenceManagerService . getJobInstance ( instanceId ) ;
public class TransformerIdentityImpl { /** * Set the output properties for the transformation . These * properties will override properties set in the Templates * with xsl : output . * < p > If argument to this function is null , any properties * previously set are removed , and the value will revert to the value * defined in the templates object . < / p > * < p > Pass a qualified property key name as a two - part string , the namespace URI * enclosed in curly braces ( { } ) , followed by the local name . If the * name has a null URL , the String only contain the local name . An * application can safely check for a non - null URI by testing to see if the first * character of the name is a ' { ' character . < / p > * < p > For example , if a URI and local name were obtained from an element * defined with & lt ; xyz : foo xmlns : xyz = " http : / / xyz . foo . com / yada / baz . html " / & gt ; , * then the qualified name would be " { http : / / xyz . foo . com / yada / baz . html } foo " . Note that * no prefix is used . < / p > * @ param oformat A set of output properties that will be * used to override any of the same properties in affect * for the transformation . * @ see javax . xml . transform . OutputKeys * @ see java . util . Properties * @ throws IllegalArgumentException if any of the argument keys are not * recognized and are not namespace qualified . */ public void setOutputProperties ( Properties oformat ) throws IllegalArgumentException { } }
if ( null != oformat ) { // See if an * explicit * method was set . String method = ( String ) oformat . get ( OutputKeys . METHOD ) ; if ( null != method ) m_outputFormat = new OutputProperties ( method ) ; else m_outputFormat = new OutputProperties ( ) ; m_outputFormat . copyFrom ( oformat ) ; } else { // if oformat is null JAXP says that any props previously set are removed // and we are to revert back to those in the templates object ( i . e . Stylesheet ) . m_outputFormat = null ; }
public class FedoraEventImpl { /** * Convert a JCR Event to a FedoraEvent * @ param event the JCR Event * @ return a FedoraEvent */ public static FedoraEvent from ( final Event event ) { } }
requireNonNull ( event ) ; try { @ SuppressWarnings ( "unchecked" ) final Map < String , String > info = new HashMap < > ( event . getInfo ( ) ) ; final String userdata = event . getUserData ( ) ; try { if ( userdata != null && ! userdata . isEmpty ( ) ) { final JsonNode json = MAPPER . readTree ( userdata ) ; if ( json . has ( BASE_URL ) ) { String url = json . get ( BASE_URL ) . asText ( ) ; while ( url . endsWith ( "/" ) ) { url = url . substring ( 0 , url . length ( ) - 1 ) ; } info . put ( BASE_URL , url ) ; } if ( json . has ( USER_AGENT ) ) { info . put ( USER_AGENT , json . get ( USER_AGENT ) . asText ( ) ) ; } } else { LOGGER . debug ( "Event UserData is empty!" ) ; } } catch ( final IOException ex ) { LOGGER . warn ( "Error extracting user data: " + userdata , ex . getMessage ( ) ) ; } final Set < String > resourceTypes = getResourceTypes ( event ) . collect ( toSet ( ) ) ; return new FedoraEventImpl ( valueOf ( event . getType ( ) ) , cleanPath ( event ) , resourceTypes , event . getUserID ( ) , FedoraSessionUserUtil . getUserURI ( event . getUserID ( ) ) , ofEpochMilli ( event . getDate ( ) ) , info ) ; } catch ( final RepositoryException ex ) { throw new RepositoryRuntimeException ( "Error converting JCR Event to FedoraEvent" , ex ) ; }
public class MediaTypeParser { /** * Converts a string such as " text / plain " into a MediaType object . * @ param value The value to parse * @ return A MediaType object */ public static MediaType fromString ( String value ) { } }
if ( value == null ) { throw new NullPointerException ( "value" ) ; } List < ParameterizedHeaderWithValue > headerValues = ParameterizedHeaderWithValue . fromString ( value ) ; if ( headerValues . isEmpty ( ) ) { throw new IllegalArgumentException ( "The value '" + value + "' did not contain a valid header value" ) ; } ParameterizedHeaderWithValue v = headerValues . get ( 0 ) ; String [ ] split = v . value ( ) . split ( "/" ) ; if ( split . length != 2 ) { throw new IllegalArgumentException ( "Media types must be in the format 'type/subtype'; this is inavlid: '" + v . value ( ) + "'" ) ; } return new MediaType ( split [ 0 ] , split [ 1 ] , v . parameters ( ) ) ;
public class AbatisService { /** * Default DB file nameを利用する外部Constructor * @ param context * 呼び出し元Contextオブジェクト * @ param dbName * 生成するDB file name */ protected static AbatisService getInstance ( Context context , int version ) { } }
if ( instance == null ) { instance = new AbatisService ( context , version ) ; } return instance ;
public class CmsXmlSitemapActionElement { /** * Constructs an XML sitemap generator given an XML sitemap configuration file . < p > * @ param seoFileRes the sitemap XML file * @ param config the parsed configuration * @ return the sitemap generator , or null if the given configuration is not an XML sitemap configuration * @ throws CmsException if something goes wrong */ public static CmsXmlSitemapGenerator prepareSitemapGenerator ( CmsResource seoFileRes , CmsXmlSeoConfiguration config ) throws CmsException { } }
if ( config . getMode ( ) . equals ( CmsXmlSeoConfiguration . MODE_XML_SITEMAP ) ) { String baseFolderRootPath = CmsFileUtil . removeTrailingSeparator ( CmsResource . getParentFolder ( seoFileRes . getRootPath ( ) ) ) ; CmsXmlSitemapGenerator xmlSitemapGenerator = createSitemapGenerator ( config . getSitemapGeneratorClassName ( ) , baseFolderRootPath ) ; xmlSitemapGenerator . setComputeContainerPageDates ( config . shouldComputeContainerPageModificationDates ( ) ) ; CmsPathIncludeExcludeSet inexcludeSet = xmlSitemapGenerator . getIncludeExcludeSet ( ) ; for ( String include : config . getIncludes ( ) ) { inexcludeSet . addInclude ( include ) ; } for ( String exclude : config . getExcludes ( ) ) { inexcludeSet . addExclude ( exclude ) ; } xmlSitemapGenerator . setServerUrl ( config . getServerUrl ( ) ) ; return xmlSitemapGenerator ; } return null ;
public class SpotifyApi { /** * Get a playlist . * @ deprecated Playlist IDs are unique for themselves . This parameter is thus no longer used . * ( https : / / developer . spotify . com / community / news / 2018/06/12 / changes - to - playlist - uris / ) * @ param user _ id The playlists owners username . * @ param playlist _ id The playlists ID . * @ return A { @ link GetPlaylistRequest . Builder } . * @ see < a href = " https : / / developer . spotify . com / web - api / user - guide / # spotify - uris - and - ids " > Spotify : URLs & amp ; IDs < / a > */ @ Deprecated public GetPlaylistRequest . Builder getPlaylist ( String user_id , String playlist_id ) { } }
return new GetPlaylistRequest . Builder ( accessToken ) . setDefaults ( httpManager , scheme , host , port ) . user_id ( user_id ) . playlist_id ( playlist_id ) ;
public class XMLUnit { /** * Utility method to build a Document using a specific DocumentBuilder * and reading characters from a specific Reader . * @ param withBuilder * @ param fromReader * @ return Document built * @ throws SAXException * @ throws IOException */ public static Document buildDocument ( DocumentBuilder withBuilder , Reader fromReader ) throws SAXException , IOException { } }
return buildDocument ( withBuilder , new InputSource ( fromReader ) ) ;
public class StubObject { /** * Create a { @ link StubObject } using the provided user ID and a new object ID * @ param sUserID * User ID * @ param aCustomAttrs * Custom attributes . May be < code > null < / code > . * @ return Never < code > null < / code > . */ @ Nonnull public static StubObject createForUser ( @ Nullable final String sUserID , @ Nullable final Map < String , String > aCustomAttrs ) { } }
return new StubObject ( GlobalIDFactory . getNewPersistentStringID ( ) , sUserID , aCustomAttrs ) ;
public class XDSRepositoryAuditor { /** * Get an instance of the XDS Document Repository Auditor from the * global context * @ return XDS Document Repository Auditor instance */ public static XDSRepositoryAuditor getAuditor ( ) { } }
AuditorModuleContext ctx = AuditorModuleContext . getContext ( ) ; return ( XDSRepositoryAuditor ) ctx . getAuditor ( XDSRepositoryAuditor . class ) ;
public class BandwidthGroupTargeting { /** * Gets the bandwidthGroups value for this BandwidthGroupTargeting . * @ return bandwidthGroups * The bandwidth groups that are being targeted or excluded by * the * { @ link LineItem } . */ public com . google . api . ads . admanager . axis . v201808 . Technology [ ] getBandwidthGroups ( ) { } }
return bandwidthGroups ;
public class CustomMatchingStrategy { /** * Gets the collection match for the web resource collection based on the following custom method algorithm . * < pre > * Custom method matching use case . * Happy path : * 1 . Validate the resource name matches one of the URL patterns * 2 . Validate the method matches * 3 . Return the collection match found * Exceptional path : * 1 . a If resource name does not match , return RESPONSE _ NO _ MATCH . * 2 . a If method does not match , determine that it is listed and return RESPONSE _ NO _ MATCH . * 2 . b When method is not listed , the match is null and it is processed by method getMatchResponse turning it into a CUSTOM _ NO _ MATCH _ RESPONSE . * < / pre > */ @ Override protected CollectionMatch getCollectionMatchForWebResourceCollection ( WebResourceCollection webResourceCollection , String resourceName , String method ) { } }
CollectionMatch match = null ; CollectionMatch collectionMatchFound = webResourceCollection . performUrlMatch ( resourceName ) ; if ( collectionMatchFound != null ) { if ( webResourceCollection . isMethodMatched ( method ) ) { match = collectionMatchFound ; } else if ( webResourceCollection . isMethodListed ( method ) ) { match = CollectionMatch . RESPONSE_NO_MATCH ; } } else { match = CollectionMatch . RESPONSE_NO_MATCH ; } return match ;
public class DateConverter { /** * Converts an { @ link Object } of { @ link Class type S } into an { @ link Object } of { @ link Class type T } . * @ param value { @ link Object } of { @ link Class type S } to convert . * @ return the converted { @ link Object } of { @ link Class type T } . * @ throws ConversionException if the { @ link Object } cannot be converted . * @ see org . cp . elements . data . conversion . ConversionService # convert ( Object , Class ) * @ see # convert ( Object , Class ) */ @ Override public Date convert ( Object value ) { } }
if ( value instanceof Calendar ) { return ( ( Calendar ) value ) . getTime ( ) ; } else if ( value instanceof Date ) { return ( Date ) value ; } else if ( value instanceof Number ) { return new Date ( ( ( Number ) value ) . longValue ( ) ) ; } else if ( value instanceof String ) { String valueString = String . valueOf ( value ) . trim ( ) ; try { return StringUtils . isDigits ( valueString ) ? new Date ( Long . parseLong ( valueString ) ) : new Date ( getDateFormat ( ) . parse ( valueString ) . getTime ( ) ) ; } catch ( NumberFormatException | ParseException cause ) { throw newConversionException ( cause , "[%s] is not a valid date/time" , value ) ; } } else { return super . convert ( value ) ; }
public class MultiPolylineMarkers { /** * Is it deleted * @ return */ public boolean isDeleted ( ) { } }
boolean deleted = true ; for ( PolylineMarkers polyline : polylineMarkers ) { deleted = polyline . isDeleted ( ) ; if ( ! deleted ) { break ; } } return deleted ;
public class JNvgraph { /** * Allocate numsets vectors of size E reprensenting Edge Data and attached them the graph . * settypes [ i ] is the type of vector # i , currently all Vertex and Edge data should have the same type */ public static int nvgraphAllocateEdgeData ( nvgraphHandle handle , nvgraphGraphDescr descrG , long numsets , Pointer settypes ) { } }
return checkResult ( nvgraphAllocateEdgeDataNative ( handle , descrG , numsets , settypes ) ) ;
public class EcoreGeneratorFragment { /** * Use { @ link GenModelAccess # getGenPackage ( EPackage , ResourceSet ) } */ @ Deprecated protected List < GenPackage > loadReferencedGenModels ( ResourceSet rs ) { } }
List < GenPackage > result = Lists . newArrayList ( ) ; if ( getReferencedGenModels ( ) != null ) { for ( String uri : getReferencedGenModels ( ) . split ( "," ) ) { try { Resource resource = rs . getResource ( URI . createURI ( uri . trim ( ) ) , true ) ; GenModel genmodel = ( GenModel ) resource . getContents ( ) . get ( 0 ) ; EList < GenPackage > genPackages = genmodel . getGenPackages ( ) ; for ( GenPackage genPackage : genPackages ) { genPackage . getEcorePackage ( ) . getEClassifiers ( ) ; result . add ( genPackage ) ; } } catch ( Exception e ) { log . error ( "Couldn't find genmodel for uri '" + uri + "'" ) ; throw new WrappedException ( e ) ; } } } return result ;
public class ProtoBufBuilderProcessor { /** * Method adds a new default message instance to the repeated field and return it ' s builder instance . * @ param repeatedFieldDescriptor The field descriptor of the repeated field . * @ param builder The builder instance of the message which contains the repeated field . * @ return The builder instance of the new added message is returned . * @ throws CouldNotPerformException */ public static Message . Builder addDefaultInstanceToRepeatedField ( final Descriptors . FieldDescriptor repeatedFieldDescriptor , final Message . Builder builder ) throws CouldNotPerformException { } }
if ( repeatedFieldDescriptor == null ) { throw new NotAvailableException ( "repeatedFieldDescriptor" ) ; } return addDefaultInstanceToRepeatedField ( repeatedFieldDescriptor . getName ( ) , builder ) ;
public class MVQueryRewriter { /** * JSON deserializers - - pretend that deserialization will never fail */ private static AbstractExpression predicate_of ( MaterializedViewInfo mv ) { } }
try { return AbstractExpression . fromJSONString ( Encoder . hexDecodeToString ( mv . getPredicate ( ) ) , null ) ; } catch ( JSONException e ) { return null ; }
public class ReadOnlyUtils { /** * Extracts the version id from a string * @ param versionDir The string * @ return Returns the version id of the directory , else - 1 */ private static long getVersionId ( String versionDir ) { } }
try { return Long . parseLong ( versionDir . replace ( "version-" , "" ) ) ; } catch ( NumberFormatException e ) { logger . trace ( "Cannot parse version directory to obtain id " + versionDir ) ; return - 1 ; }
public class ShakeAroundAPI { /** * 批量查询设备统计数据接口 * @ param accessToken accessToken * @ param statisticsDeviceList statisticsDeviceList * @ return result */ public static StatisticsDeviceListResult statisticsDeviceList ( String accessToken , StatisticsDeviceList statisticsDeviceList ) { } }
return statisticsDeviceList ( accessToken , JsonUtil . toJSONString ( statisticsDeviceList ) ) ;
public class vpnclientlessaccesspolicy { /** * Use this API to add vpnclientlessaccesspolicy . */ public static base_response add ( nitro_service client , vpnclientlessaccesspolicy resource ) throws Exception { } }
vpnclientlessaccesspolicy addresource = new vpnclientlessaccesspolicy ( ) ; addresource . name = resource . name ; addresource . rule = resource . rule ; addresource . profilename = resource . profilename ; return addresource . add_resource ( client ) ;
public class LinearClassifier { /** * Returns number of features with weight above a certain threshold * ( across all labels ) * @ param threshold Threshold above which we will count the feature * @ param useMagnitude Whether the notion of " large " should ignore * the sign of the feature weight . * @ return number of features satisfying the specified conditions */ public int getFeatureCount ( double threshold , boolean useMagnitude ) { } }
int n = 0 ; for ( int feat = 0 ; feat < weights . length ; feat ++ ) { for ( int lab = 0 ; lab < weights [ feat ] . length ; lab ++ ) { double thisWeight = ( useMagnitude ) ? Math . abs ( weights [ feat ] [ lab ] ) : weights [ feat ] [ lab ] ; if ( thisWeight > threshold ) { n ++ ; } } } return n ;
public class OAuth20Utils { /** * Write to the output this error . * @ param response the response * @ param error error message * @ return json - backed view . */ public static ModelAndView writeError ( final HttpServletResponse response , final String error ) { } }
val model = CollectionUtils . wrap ( OAuth20Constants . ERROR , error ) ; val mv = new ModelAndView ( new MappingJackson2JsonView ( MAPPER ) , ( Map ) model ) ; mv . setStatus ( HttpStatus . BAD_REQUEST ) ; response . setStatus ( HttpStatus . BAD_REQUEST . value ( ) ) ; return mv ;
public class GradleToolingHelper { /** * Translate the given { @ link DependencyDescriptor } in to an instance of { @ link ArtifactSpec } . */ public static ArtifactSpec toArtifactSpec ( DependencyDescriptor descriptor ) { } }
return new ArtifactSpec ( descriptor . getScope ( ) , descriptor . getGroup ( ) , descriptor . getName ( ) , descriptor . getVersion ( ) , descriptor . getType ( ) , descriptor . getClassifier ( ) , descriptor . getFile ( ) ) ;
public class MessageProcessInfoManualField { /** * Set up the default screen control for this field . * @ param itsLocation Location of this component on screen ( ie . , GridBagConstraint ) . * @ param targetScreen Where to place this component ( ie . , Parent screen or GridBagLayout ) . * @ param converter The converter to set the screenfield to . * @ param iDisplayFieldDesc Display the label ? ( optional ) . * @ param properties Extra properties * @ return Return the component or ScreenField that is created for this field . */ public ScreenComponent setupDefaultView ( ScreenLoc itsLocation , ComponentParent targetScreen , Convert converter , int iDisplayFieldDesc , Map < String , Object > properties ) { } }
// NOTE(review): builds the default view, then frees every ScreenComponent already attached to
// the converter's field (the i-- compensates for the component list shrinking after free()),
// and finally appends a LOOKUP canned box (backed by a reference record) and a CLEAR canned box.
// The large /* ... */ section below is historical commented-out code kept for reference.
ScreenComponent screenField = super . setupDefaultView ( itsLocation , targetScreen , converter , iDisplayFieldDesc , properties ) ; for ( int i = 0 ; ; i ++ ) { Object comp = converter . getField ( ) . getComponent ( i ) ; if ( comp == null ) break ; if ( comp instanceof ScreenComponent ) { ( ( ScreenComponent ) comp ) . free ( ) ; i -- ; } } Record record = this . makeReferenceRecord ( ) ; properties = new HashMap < String , Object > ( ) ; properties . put ( ScreenModel . RECORD , record ) ; properties . put ( ScreenModel . COMMAND , ThinMenuConstants . LOOKUP ) ; properties . put ( ScreenModel . IMAGE , ThinMenuConstants . LOOKUP ) ; createScreenComponent ( ScreenModel . CANNED_BOX , targetScreen . getNextLocation ( ScreenConstants . RIGHT_OF_LAST , ScreenConstants . DONT_SET_ANCHOR ) , targetScreen , converter , ScreenConstants . DONT_DISPLAY_FIELD_DESC , properties ) ; /* new SSelectBox ( targetScreen . getNextLocation ( ScreenConstants . RIGHT _ OF _ LAST , ScreenConstants . DONT _ SET _ ANCHOR ) , targetScreen , converter , ScreenConstants . DONT _ DISPLAY _ DESC , record ) public boolean doCommand ( String strCommand , ScreenField sourceSField , int iCommandOptions ) boolean bHandled = false ; if ( ThinMenuConstants . LOOKUP . equalsIgnoreCase ( strCommand ) ) if ( this . getParentScreen ( ) ! = null ) / / Give the parent screen a shot at it . bHandled = this . getParentScreen ( ) . handleCommand ( LOOKUP _ WITH _ PARAMS , sourceSField , iCommandOptions ) ; if ( ! bHandled ) bHandled = super . doCommand ( strCommand , sourceSField , iCommandOptions ) ; return bHandled ; */ properties = new HashMap < String , Object > ( ) ; properties . put ( ScreenModel . FIELD , this ) ; properties . put ( ScreenModel . COMMAND , ScreenModel . CLEAR ) ; properties . put ( ScreenModel . IMAGE , ScreenModel . CLEAR ) ; createScreenComponent ( ScreenModel . CANNED_BOX , targetScreen . getNextLocation ( ScreenConstants . RIGHT_OF_LAST , ScreenConstants . 
DONT_SET_ANCHOR ) , targetScreen , converter , ScreenConstants . DONT_DISPLAY_FIELD_DESC , properties ) ; // new SCannedBox ( targetScreen . getNextLocation ( ScreenConstants . RIGHT _ OF _ LAST , ScreenConstants . DONT _ SET _ ANCHOR ) , targetScreen , converter , SCannedBox . CLEAR , ScreenConstants . DONT _ DISPLAY _ FIELD _ DESC , this ) ; return screenField ;
public class IterUtil { /** * 字段值与列表值对应的Map , 常用于元素对象中有唯一ID时需要按照这个ID查找对象的情况 < br > * 例如 : 车牌号 = 》 车 * @ param < K > 字段名对应值得类型 , 不确定请使用Object * @ param < V > 对象类型 * @ param iter 对象列表 * @ param fieldName 字段名 ( 会通过反射获取其值 ) * @ return 某个字段值与对象对应Map * @ since 4.0.4 */ @ SuppressWarnings ( "unchecked" ) public static < K , V > Map < K , V > fieldValueMap ( Iterator < V > iter , String fieldName ) { } }
final Map < K , V > result = new HashMap < > ( ) ; if ( null != iter ) { V value ; while ( iter . hasNext ( ) ) { value = iter . next ( ) ; result . put ( ( K ) ReflectUtil . getFieldValue ( value , fieldName ) , value ) ; } } return result ;
public class JMThread { /** * Start with single executor service executor service . * @ param message the message * @ param runnable the runnable * @ return the executor service */ public static ExecutorService startWithSingleExecutorService ( String message , Runnable runnable ) { } }
return startWithExecutorService ( newSingleThreadPool ( ) , message , runnable ) ;
public class DictTerm { /** * setter for DictCanon - sets canonical form * @ generated * @ param v value to set into the feature */ public void setDictCanon ( String v ) { } }
// NOTE(review): UIMA JCas-generated setter -- do not hand-edit. Verifies the DictCanon feature
// exists in the type system (throwing if missing), then writes the string through the low-level CAS.
if ( DictTerm_Type . featOkTst && ( ( DictTerm_Type ) jcasType ) . casFeat_DictCanon == null ) jcasType . jcas . throwFeatMissing ( "DictCanon" , "org.apache.uima.conceptMapper.DictTerm" ) ; jcasType . ll_cas . ll_setStringValue ( addr , ( ( DictTerm_Type ) jcasType ) . casFeatCode_DictCanon , v ) ;
public class ResourceDataSyncItemMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ResourceDataSyncItem resourceDataSyncItem , ProtocolMarshaller protocolMarshaller ) { } }
// NOTE(review): marshals each field through its binding in a fixed order; any failure
// (including from the marshaller itself) is wrapped in an SdkClientException.
if ( resourceDataSyncItem == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( resourceDataSyncItem . getSyncName ( ) , SYNCNAME_BINDING ) ; protocolMarshaller . marshall ( resourceDataSyncItem . getS3Destination ( ) , S3DESTINATION_BINDING ) ; protocolMarshaller . marshall ( resourceDataSyncItem . getLastSyncTime ( ) , LASTSYNCTIME_BINDING ) ; protocolMarshaller . marshall ( resourceDataSyncItem . getLastSuccessfulSyncTime ( ) , LASTSUCCESSFULSYNCTIME_BINDING ) ; protocolMarshaller . marshall ( resourceDataSyncItem . getLastStatus ( ) , LASTSTATUS_BINDING ) ; protocolMarshaller . marshall ( resourceDataSyncItem . getSyncCreatedTime ( ) , SYNCCREATEDTIME_BINDING ) ; protocolMarshaller . marshall ( resourceDataSyncItem . getLastSyncStatusMessage ( ) , LASTSYNCSTATUSMESSAGE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DataSiftManagedSource { /** * Create a new managed source * @ param name the name of the source * @ param source the source and its configuratiosn * @ return this */ public < T extends DataSource > FutureData < ManagedSource > create ( String name , T source ) { } }
return updateOrCreate ( name , source , null ) ;
public class EsUtil { /**
 * Changes the given alias to refer to the supplied index name.
 * First removes the alias from every index currently carrying it, then points it at
 * {@code index}, all in one atomic aliases request.
 * @param index the index we were just building
 * @param alias the alias to re-point at this index
 * @return 0 for ok, -1 when Elasticsearch reports a failure
 * @throws IndexException on any other failure
 */ public int swapIndex ( final String index , final String alias ) throws IndexException { } }
// NOTE(review): collects all indices currently holding the alias, queues a removeAlias for each,
// queues a single addAlias for the new index, and submits the combined request. Elasticsearch
// errors are logged and reported as -1; IndexException is rethrown; anything else is wrapped.
// IndicesAliasesResponse resp = null ; try { /* index is the index we were just indexing into */ final IndicesAdminClient idx = getAdminIdx ( ) ; final GetAliasesRequestBuilder igarb = idx . prepareGetAliases ( alias ) ; final ActionFuture < GetAliasesResponse > getAliasesAf = idx . getAliases ( igarb . request ( ) ) ; final GetAliasesResponse garesp = getAliasesAf . actionGet ( ) ; final ImmutableOpenMap < String , List < AliasMetaData > > aliasesmeta = garesp . getAliases ( ) ; final IndicesAliasesRequestBuilder iarb = idx . prepareAliases ( ) ; final Iterator < String > it = aliasesmeta . keysIt ( ) ; while ( it . hasNext ( ) ) { final String indexName = it . next ( ) ; for ( final AliasMetaData amd : aliasesmeta . get ( indexName ) ) { if ( amd . getAlias ( ) . equals ( alias ) ) { iarb . removeAlias ( indexName , alias ) ; } } } iarb . addAlias ( index , alias ) ; final ActionFuture < IndicesAliasesResponse > af = idx . aliases ( iarb . request ( ) ) ; /* resp = */ af . actionGet ( ) ; return 0 ; } catch ( final ElasticsearchException ese ) { // Failed somehow error ( ese ) ; return - 1 ; } catch ( final IndexException ie ) { throw ie ; } catch ( final Throwable t ) { throw new IndexException ( t ) ; }
public class ExposeLinearLayoutManagerEx { /** * < p > Scroll the RecyclerView to make the position visible . < / p > * < p > RecyclerView will scroll the minimum amount that is necessary to make the * target position visible . If you are looking for a similar behavior to * { @ link android . widget . ListView # setSelection ( int ) } or * { @ link android . widget . ListView # setSelectionFromTop ( int , int ) } , use * { @ link # scrollToPositionWithOffset ( int , int ) } . < / p > * < p > Note that scroll position change will not be reflected until the next layout call . < / p > * @ param position Scroll to this adapter position * @ see # scrollToPositionWithOffset ( int , int ) */ @ Override public void scrollToPosition ( int position ) { } }
// NOTE(review): records the target position with no offset, clears any saved anchor position so a
// pending restore cannot override the request, and schedules a layout pass to apply the scroll.
mCurrentPendingScrollPosition = position ; mPendingScrollPositionOffset = INVALID_OFFSET ; if ( mCurrentPendingSavedState != null ) { mCurrentPendingSavedState . putInt ( "AnchorPosition" , RecyclerView . NO_POSITION ) ; } requestLayout ( ) ;
public class DeviceInfo { public static void main ( String [ ] args ) { } }
if ( args . length == 0 ) { System . out . println ( "Device name ?" ) ; System . exit ( 0 ) ; } try { String devname = args [ 0 ] ; Database db = ApiUtil . get_db_obj ( ) ; DeviceInfo info = db . get_device_info ( devname ) ; System . out . println ( info ) ; } catch ( DevFailed e ) { if ( args . length < 2 || args [ 1 ] . equals ( "-no_exception" ) == false ) fr . esrf . TangoDs . Except . print_exception ( e ) ; System . exit ( 1 ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; System . exit ( 1 ) ; } System . exit ( 0 ) ;
public class IPAddressDivision { /** * Produces a normalized string to represent the segment . * If the segment CIDR prefix length covers the range , then it is assumed to be a CIDR , and the string has only the lower value of the CIDR range . * Otherwise , the explicit range will be printed . * @ return */ @ Override public String getString ( ) { } }
// NOTE(review): lazily computes and caches the string using double-checked locking on
// cachedString. Single values and single prefix blocks print only the lower value; a full
// range prints the wildcard; otherwise an explicit lower-upper range is printed, with the
// upper value masked down when this division is a prefix block.
String result = cachedString ; if ( result == null ) { synchronized ( this ) { result = cachedString ; if ( result == null ) { if ( isSinglePrefixBlock ( ) || ! isMultiple ( ) ) { // covers the case of ! isMultiple , ie single addresses , when there is no prefix or the prefix is the bit count result = getDefaultLowerString ( ) ; } else if ( isFullRange ( ) ) { result = IPAddress . SEGMENT_WILDCARD_STR ; } else { long upperValue = getUpperDivisionValue ( ) ; if ( isPrefixBlock ( ) ) { upperValue &= getDivisionNetworkMask ( getDivisionPrefixLength ( ) ) ; } result = getDefaultRangeString ( getDivisionValue ( ) , upperValue , getDefaultTextualRadix ( ) ) ; } cachedString = result ; } } } return result ;
public class InternalXtextParser { /** * InternalXtext . g : 3372:1 : ruleUntilToken returns [ EObject current = null ] : ( otherlv _ 0 = ' - > ' ( ( lv _ terminal _ 1_0 = ruleTerminalTokenElement ) ) ) ; */ public final EObject ruleUntilToken ( ) throws RecognitionException { } }
// NOTE(review): ANTLR-generated rule method -- do not hand-edit. Matches the '->' keyword, then
// parses a TerminalTokenElement and assigns it to the 'terminal' feature of the UntilToken model
// element; recognition errors are recovered in place and skipped tokens appended.
EObject current = null ; Token otherlv_0 = null ; EObject lv_terminal_1_0 = null ; enterRule ( ) ; try { // InternalXtext . g : 3378:2 : ( ( otherlv _ 0 = ' - > ' ( ( lv _ terminal _ 1_0 = ruleTerminalTokenElement ) ) ) ) // InternalXtext . g : 3379:2 : ( otherlv _ 0 = ' - > ' ( ( lv _ terminal _ 1_0 = ruleTerminalTokenElement ) ) ) { // InternalXtext . g : 3379:2 : ( otherlv _ 0 = ' - > ' ( ( lv _ terminal _ 1_0 = ruleTerminalTokenElement ) ) ) // InternalXtext . g : 3380:3 : otherlv _ 0 = ' - > ' ( ( lv _ terminal _ 1_0 = ruleTerminalTokenElement ) ) { otherlv_0 = ( Token ) match ( input , 43 , FollowSets000 . FOLLOW_47 ) ; newLeafNode ( otherlv_0 , grammarAccess . getUntilTokenAccess ( ) . getHyphenMinusGreaterThanSignKeyword_0 ( ) ) ; // InternalXtext . g : 3384:3 : ( ( lv _ terminal _ 1_0 = ruleTerminalTokenElement ) ) // InternalXtext . g : 3385:4 : ( lv _ terminal _ 1_0 = ruleTerminalTokenElement ) { // InternalXtext . g : 3385:4 : ( lv _ terminal _ 1_0 = ruleTerminalTokenElement ) // InternalXtext . g : 3386:5 : lv _ terminal _ 1_0 = ruleTerminalTokenElement { newCompositeNode ( grammarAccess . getUntilTokenAccess ( ) . getTerminalTerminalTokenElementParserRuleCall_1_0 ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_2 ) ; lv_terminal_1_0 = ruleTerminalTokenElement ( ) ; state . _fsp -- ; if ( current == null ) { current = createModelElementForParent ( grammarAccess . getUntilTokenRule ( ) ) ; } set ( current , "terminal" , lv_terminal_1_0 , "org.eclipse.xtext.Xtext.TerminalTokenElement" ) ; afterParserOrEnumRuleCall ( ) ; } } } } leaveRule ( ) ; } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class LocationNumberImpl { /** * makes checks on APRI - see NOTE to APRI in Q . 763 , p 23 */ protected void doAddressPresentationRestricted ( ) { } }
// NOTE(review): no-op unless the address representation indicator is "not available". Per
// Q.763 NOTE 1, in that case the digits are dropped and the odd/even, NAI, NI and NPI
// subfields are zeroed, with the screening indicator forced to "network provided".
if ( this . addressRepresentationRestrictedIndicator != _APRI_NOT_AVAILABLE ) return ; // NOTE 1 If the parameter is included and the address presentation // restricted indicator indicates // address not available , octets 3 to n ( this are digits . ) are omitted , // the subfields in items a - odd / evem , b - nai , c - ni and d - npi , are // coded with // 0 ' s , and the subfield f - filler , is coded with 11. this . oddFlag = 0 ; this . natureOfAddresIndicator = 0 ; this . numberingPlanIndicator = 0 ; this . internalNetworkNumberIndicator = 0 ; this . screeningIndicator = _SI_NETWORK_PROVIDED ; this . setAddress ( "" ) ;
public class ModelsSupporter { /** * Calculate the drainage direction factor ( is used in some horton machine like pitfiller , * flow , . . . ) * Is the distance betwen the central pixel , in a 3x3 kernel , and the neighboured pixels . * @ param dx is the resolution of a raster map in the x direction . * @ param dy is the resolution of the raster map in the y direction . * @ return < b > fact < / b > the direction factor or 1 / lenght where lenght is the distance of the * pixel from the central poxel . */ public static double [ ] calculateDirectionFactor ( double dx , double dy ) { } }
// NOTE(review): fact[k] = 1 / distance to neighbour k for k in 1..8, using the DIR offset
// table; index 0 is deliberately left unused (0.0) so entries align with direction codes.
// direction factor , where the components are 1 / length double [ ] fact = new double [ 9 ] ; for ( int k = 1 ; k <= 8 ; k ++ ) { fact [ k ] = 1.0 / ( Math . sqrt ( DIR [ k ] [ 0 ] * dy * DIR [ k ] [ 0 ] * dy + DIR [ k ] [ 1 ] * DIR [ k ] [ 1 ] * dx * dx ) ) ; } return fact ;
public class ConfigurationUtils { /**
 * Saves the application's root instances into the instances file under the
 * project's instances directory. I/O failures are logged, not rethrown.
 * @param app the application (not null)
 */ public static void saveInstances ( Application app ) { } }
// NOTE(review): ensures the parent directory exists before writing; on IOException the error
// is logged at SEVERE and the exception is logged, but the method returns normally.
File targetFile = new File ( app . getDirectory ( ) , Constants . PROJECT_DIR_INSTANCES + "/" + INSTANCES_FILE ) ; try { Utils . createDirectory ( targetFile . getParentFile ( ) ) ; RuntimeModelIo . writeInstances ( targetFile , app . getRootInstances ( ) ) ; } catch ( IOException e ) { Logger logger = Logger . getLogger ( ConfigurationUtils . class . getName ( ) ) ; logger . severe ( "Failed to save instances. " + e . getMessage ( ) ) ; Utils . logException ( logger , e ) ; }
public class TCAbortMessageImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . tcap . asn . Encodable # encode ( org . mobicents . protocols . asn . AsnOutputStream ) */ public void encode ( AsnOutputStream aos ) throws EncodeException { } }
// NOTE(review): ASN.1 BER encoding of a TC-Abort: outer application tag, definite-length
// content holding the destination transaction id, then either the P-abort cause (type) or,
// failing that, the dialog portion. I/O and ASN errors are wrapped in EncodeException.
try { aos . writeTag ( Tag . CLASS_APPLICATION , false , _TAG ) ; int pos = aos . StartContentDefiniteLength ( ) ; aos . writeOctetString ( Tag . CLASS_APPLICATION , _TAG_DTX , this . destTxId ) ; if ( this . type != null ) aos . writeInteger ( Tag . CLASS_APPLICATION , _TAG_P , this . type . getType ( ) ) ; else if ( this . dp != null ) this . dp . encode ( aos ) ; aos . FinalizeContent ( pos ) ; } catch ( IOException e ) { throw new EncodeException ( "IOException while encoding TC-Abort: " + e . getMessage ( ) , e ) ; } catch ( AsnException e ) { throw new EncodeException ( "AsnException while encoding TC-Abort: " + e . getMessage ( ) , e ) ; }
public class FilterDriver { /**
 * Sets a configuration parameter from an enum value by delegating to
 * {@code conf.setEnum}.
 * @param name parameter name
 * @param value enum parameter value
 */ @ SuppressWarnings ( { } }
"unchecked" , "rawtypes" } ) public void setParameter ( String name , Enum value ) { conf . setEnum ( name , value ) ;
public class ViewRecycler { /** * Inflates the view , which is used to visualize a specific item . * @ param item * The item , which should be visualized by the inflated view , as an instance of the * generic type ItemType . The item may not be null * @ param parent * The parent of the inflated view as an instance of the class { @ link ViewGroup } or * null , if no parent is available * @ param useCache * True , if an unused view should retrieved from the cache , if possible , false , if a new * instance should be inflated instead * @ param params * An array , which may contain optional parameters , as an array of the generic type * ParamType or an empty array , if no optional parameters are available * @ return A pair , which contains the view , which is used to visualize the given item , as well * as a boolean value , which indicates , whether a new view has been inflated , or if an unused * view has been reused from the cache , as an instance of the class Pair . The pair may not be * null */ @ SafeVarargs @ NonNull public final Pair < View , Boolean > inflate ( @ NonNull final ItemType item , @ Nullable final ViewGroup parent , final boolean useCache , @ NonNull final ParamType ... params ) { } }
// NOTE(review): if the item already has an active view it is reused directly. Otherwise the
// cache is polled for an unused view of the item's view type (when useCache is set) and only
// on a cache miss is a new view inflated; the view is then registered as active. In every
// case onShowView runs so the adapter can (re)bind the item to the view.
Condition . INSTANCE . ensureNotNull ( params , "The array may not be null" ) ; Condition . INSTANCE . ensureNotNull ( getAdapter ( ) , "No adapter has been set" , IllegalStateException . class ) ; View view = getView ( item ) ; boolean inflated = false ; if ( view == null ) { int viewType = getAdapter ( ) . getViewType ( item ) ; if ( useCache ) { view = pollUnusedView ( viewType ) ; } if ( view == null ) { view = getAdapter ( ) . onInflateView ( getLayoutInflater ( ) , parent , item , viewType , params ) ; inflated = true ; getLogger ( ) . logInfo ( getClass ( ) , "Inflated view to visualize item " + item + " using view type " + viewType ) ; } else { getLogger ( ) . logInfo ( getClass ( ) , "Reusing view to visualize item " + item + " using view type " + viewType ) ; } getActiveViews ( ) . put ( item , view ) ; } getAdapter ( ) . onShowView ( getContext ( ) , view , item , inflated , params ) ; getLogger ( ) . logDebug ( getClass ( ) , "Updated view of item " + item ) ; return Pair . create ( view , inflated ) ;
public class AlertService { /** * Updates an existing trigger . * @ param alertId The ID of the alert owning the trigger . * @ param triggerId The ID of the trigger to update . * @ param trigger The updated trigger information . * @ return The updated trigger . * @ throws IOException If the server cannot be reached . * @ throws TokenExpiredException If the token sent along with the request has expired */ public Trigger updateTrigger ( BigInteger alertId , BigInteger triggerId , Trigger trigger ) throws IOException , TokenExpiredException { } }
String requestUrl = RESOURCE + "/" + alertId . toString ( ) + "/triggers/" + triggerId . toString ( ) ; ArgusResponse response = getClient ( ) . executeHttpRequest ( ArgusHttpClient . RequestType . PUT , requestUrl , trigger ) ; assertValidResponse ( response , requestUrl ) ; return fromJson ( response . getResult ( ) , Trigger . class ) ;
public class LdapUtils { /** * Execute search operation response . * @ param connectionFactory the connection factory * @ param baseDn the base dn * @ param filter the filter * @ return the response * @ throws LdapException the ldap exception */ public static Response < SearchResult > executeSearchOperation ( final ConnectionFactory connectionFactory , final String baseDn , final SearchFilter filter ) throws LdapException { } }
return executeSearchOperation ( connectionFactory , baseDn , filter , ReturnAttributes . ALL_USER . value ( ) , ReturnAttributes . ALL_USER . value ( ) ) ;
public class DirectoryWatcher { /** * Close the watch service . Releases resources . After calling , this instance becomes invalid and can ' t be used any more . */ public void stopWatching ( ) { } }
try { _watchService . close ( ) ; _watchService = null ; _watchedDirectories = null ; } catch ( IOException e ) { throw new RuntimeException ( "Could not stop watching directories!" , e ) ; }
public class AbstractJcrNode { /**
 * Get the node definition ID for this node, recomputing and re-caching it when the
 * cached value is missing or stale relative to the session's node-type version.
 * @return the node definition ID; never null
 * @throws ItemNotFoundException if the node that contains this node doesn't exist anymore
 * @throws ConstraintViolationException if no valid child-node definition could be found
 * @throws RepositoryException if there is a problem with this repository
 */ NodeDefinitionId nodeDefinitionId ( ) throws ItemNotFoundException , ConstraintViolationException , RepositoryException { } }
// NOTE(review): cache-aside lookup -- a cached definition is reused only while its recorded
// node-types version is current. On a miss, the best matching child-node definition is
// resolved from the parent's primary type and mixins (skipping protected definitions) and
// the result is cached together with the node-types version.
CachedDefinition defn = cachedDefn ; NodeTypes nodeTypes = session ( ) . nodeTypes ( ) ; if ( defn == null || nodeTypes . getVersion ( ) > defn . nodeTypesVersion ) { assert ! this . isRoot ( ) ; // Determine the node type based upon this node ' s type information . . . CachedNode parent = getParent ( ) . node ( ) ; SessionCache cache = sessionCache ( ) ; Name nodeName = name ( ) ; Name primaryType = node ( ) . getPrimaryType ( cache ) ; Name parentPrimaryType = parent . getPrimaryType ( cache ) ; Set < Name > parentMixins = parent . getMixinTypes ( cache ) ; SiblingCounter siblingCounter = SiblingCounter . create ( parent , cache ) ; boolean skipProtected = true ; NodeDefinitionSet childDefns = nodeTypes . findChildNodeDefinitions ( parentPrimaryType , parentMixins ) ; JcrNodeDefinition childDefn = childDefns . findBestDefinitionForChild ( nodeName , primaryType , skipProtected , siblingCounter ) ; if ( childDefn == null ) { throw new ConstraintViolationException ( JcrI18n . noChildNodeDefinition . text ( nodeName , getParent ( ) . location ( ) , readable ( parentPrimaryType ) , readable ( parentMixins ) ) ) ; } NodeDefinitionId id = childDefn . getId ( ) ; setNodeDefinitionId ( id , nodeTypes . getVersion ( ) ) ; return id ; } return defn . nodeDefnId ;
public class Repeater { /** * Assigns property names * @ param propertyNames * Names of properties for each iteration * @ throws IllegalArgumentException * In case when number of properties names does not correspond * with number of iterations */ public void assignPropertyNames ( String ... propertyNames ) throws IllegalArgumentException { } }
if ( propertyNames . length != times ) throw new IllegalArgumentException ( "Invalid length of propertyNames in Repeater." ) ; this . names = Arrays . asList ( propertyNames ) ;
public class RoadNetworkConstants { /** * Set the preferred name for the number of lanes of the roads . * @ param name is the preferred name for the number of lanes of the roads . * @ see # DEFAULT _ ATTR _ LANE _ COUNT */ public static void setPreferredAttributeNameForLaneCount ( String name ) { } }
final Preferences prefs = Preferences . userNodeForPackage ( RoadNetworkConstants . class ) ; if ( prefs != null ) { if ( name == null || "" . equals ( name ) || DEFAULT_ATTR_LANE_COUNT . equalsIgnoreCase ( name ) ) { // $ NON - NLS - 1 $ prefs . remove ( "LANE_COUNT_ATTR_NAME" ) ; // $ NON - NLS - 1 $ } else { prefs . put ( "LANE_COUNT_ATTR_NAME" , name ) ; // $ NON - NLS - 1 $ } }
public class RegionMap { /** * Getter for the region ' s east bound . * @ return the region east bound or { @ link HMConstants # doubleNovalue } */ public double getEast ( ) { } }
Double e = get ( EAST ) ; if ( e != null ) { return e ; } return HMConstants . doubleNovalue ;
public class MsExcelUtils { /** * Writes the current workbook to the given path, closes it, and creates a fresh workbook for subsequent use. * @ param path destination file path * @ throws IOException 异常 * @ throws NoSuchMethodException 异常 * @ throws IllegalAccessException 异常 * @ throws InvocationTargetException 异常 */ public static void writeAndClose ( String path ) throws InvocationTargetException , NoSuchMethodException , IllegalAccessException , IOException { } }
// NOTE(review): order matters -- persist first, then close the workbook, then reinitialize
// the shared workbook so later calls operate on a clean instance.
writeTo ( path ) ; xssfWorkbook . close ( ) ; createXssfWorkbook ( ) ;
public class TransformTask { /** * Set the output write mode : default , overwrite , or append . * @ param mode the output write mode * @ return this for method chaining */ public TransformTask setWriteMode ( Target . WriteMode mode ) { } }
Preconditions . checkArgument ( mode != Target . WriteMode . CHECKPOINT , "Checkpoint is not an allowed write mode" ) ; this . mode = mode ; return this ;
public class SingleLockedMessageEnumerationImpl { /** * / * ( non - Javadoc ) * @ see com . ibm . wsspi . sib . core . LockedMessageEnumeration # resetCursor ( ) */ public void resetCursor ( ) throws SISessionUnavailableException , SISessionDroppedException , SIErrorException , SIIncorrectCallException { } }
// NOTE(review): after the usual entry trace, validates state and that the consumer point is
// still open, then rewinds this single-message enumeration by clearing the "seen" flag.
if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPILockedMessageEnumeration . tc . isEntryEnabled ( ) ) SibTr . entry ( CoreSPILockedMessageEnumeration . tc , "resetCursor" , this ) ; checkValidState ( "resetCursor" ) ; _localConsumerPoint . checkNotClosed ( ) ; _seenSingleMessage = false ; if ( TraceComponent . isAnyTracingEnabled ( ) && CoreSPILockedMessageEnumeration . tc . isEntryEnabled ( ) ) SibTr . exit ( CoreSPILockedMessageEnumeration . tc , "resetCursor" ) ;
public class JSONObject {
    /**
     * Tests if the value should be tried as a decimal. It makes no test if
     * there are actual digits.
     *
     * @param val value to test
     * @return true if the string is "-0" or if it contains '.', 'e', or 'E',
     *         false otherwise.
     */
    protected static boolean isDecimalNotation(final String val) {
        if ("-0".equals(val)) {
            // Negative zero has no '.' or exponent but must still be parsed
            // as a decimal to preserve its sign.
            return true;
        }
        return val.indexOf('.') >= 0
                || val.indexOf('e') >= 0
                || val.indexOf('E') >= 0;
    }
}
public class JKTableModel { public void setColumnValue ( final int row , final int col , final Object value , final boolean visibleIndex ) { } }
int actualColumn = col ; if ( visibleIndex ) { actualColumn = getActualColumnIndexFromVisible ( col ) ; } getRecord ( row ) . setColumnValue ( actualColumn , value ) ; fireTableCellUpdated ( row , col ) ;
public class OjbConfiguration {
    /**
     * Loads the configuration from file "OJB.properties". If the system
     * property "OJB.properties" is set, then the configuration in that file is
     * loaded. Otherwise, the file "OJB.properties" is tried. If that is also
     * unsuccessful, then the configuration is filled with default values.
     */
    protected void load() {
        // properties file may be set as a System property;
        // if no property is set take the default name.
        // NOTE(review): key and fallback are both OJB_PROPERTIES_FILE, i.e. the
        // default filename equals the system-property name — confirm intended.
        String fn = System.getProperty(OJB_PROPERTIES_FILE, OJB_PROPERTIES_FILE);
        setFilename(fn);
        super.load();
        // default repository & connection descriptor file
        repositoryFilename = getString("repositoryFile", OJB_METADATA_FILE);
        // object cache class
        objectCacheClass = getClass("ObjectCacheClass", ObjectCacheDefaultImpl.class, ObjectCache.class);
        // load PersistentField class
        persistentFieldClass = getClass("PersistentFieldClass", PersistentFieldDirectImpl.class, PersistentField.class);
        // load PersistenceBroker class
        persistenceBrokerClass = getClass("PersistenceBrokerClass", PersistenceBrokerImpl.class, PersistenceBroker.class);
        // load ListProxy class
        listProxyClass = getClass("ListProxyClass", ListProxyDefaultImpl.class);
        // load SetProxy class
        setProxyClass = getClass("SetProxyClass", SetProxyDefaultImpl.class);
        // load CollectionProxy class
        collectionProxyClass = getClass("CollectionProxyClass", CollectionProxyDefaultImpl.class);
        // load IndirectionHandler class
        indirectionHandlerClass = getClass("IndirectionHandlerClass", IndirectionHandlerJDKImpl.class, IndirectionHandler.class);
        // load ProxyFactory class
        proxyFactoryClass = getClass("ProxyFactoryClass", ProxyFactoryJDKImpl.class, ProxyFactory.class);
        // load configuration for ImplicitLocking parameter:
        useImplicitLocking = getBoolean("ImplicitLocking", false);
        // load configuration for LockAssociations parameter:
        // anything other than "WRITE" (case-insensitive) disables write-locking of associations
        lockAssociationAsWrites = (getString("LockAssociations", "WRITE").equalsIgnoreCase("WRITE"));
        // load OQL collection class
        oqlCollectionClass = getClass("OqlCollectionClass", DListImpl.class, ManageableCollection.class);
        // set the limit for IN-sql, -1 for no limits
        sqlInLimit = getInteger("SqlInLimit", -1);
        // load configuration for the PersistenceBroker pool
        maxActive = getInteger(PoolConfiguration.MAX_ACTIVE, PoolConfiguration.DEFAULT_MAX_ACTIVE);
        maxIdle = getInteger(PoolConfiguration.MAX_IDLE, PoolConfiguration.DEFAULT_MAX_IDLE);
        maxWait = getLong(PoolConfiguration.MAX_WAIT, PoolConfiguration.DEFAULT_MAX_WAIT);
        timeBetweenEvictionRunsMillis = getLong(PoolConfiguration.TIME_BETWEEN_EVICTION_RUNS_MILLIS, PoolConfiguration.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS);
        minEvictableIdleTimeMillis = getLong(PoolConfiguration.MIN_EVICTABLE_IDLE_TIME_MILLIS, PoolConfiguration.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS);
        whenExhaustedAction = getByte(PoolConfiguration.WHEN_EXHAUSTED_ACTION, PoolConfiguration.DEFAULT_WHEN_EXHAUSTED_ACTION);
        useSerializedRepository = getBoolean("useSerializedRepository", false);
    }
}
public class Sizes { /** * Creates and returns an instance of { @ code ConstantSize } from the given encoded size and unit * description . * @ param encodedValueAndUnit value and unit in string representation * @ param horizontaltrue for horizontal , false for vertical * @ return a { @ code ConstantSize } for the given value and unit */ public static ConstantSize constant ( String encodedValueAndUnit , boolean horizontal ) { } }
String lowerCase = encodedValueAndUnit . toLowerCase ( Locale . ENGLISH ) ; String trimmed = lowerCase . trim ( ) ; return ConstantSize . valueOf ( trimmed , horizontal ) ;
public class CmsWebdavServlet {
    /**
     * Parse the Range header.<p>
     *
     * Honors the If-Range precondition first: when the client's ETag or
     * timestamp no longer matches the item, {@code FULL_RANGE} is returned so
     * the whole entity is served. Malformed or unsatisfiable ranges result in
     * a 416 status and a {@code null} return.
     *
     * @param request the servlet request we are processing
     * @param response the servlet response we are creating
     * @param item the WebdavItem with the information
     * @return list of parsed ranges, {@code FULL_RANGE} when If-Range failed,
     *         or {@code null} when no (valid) range applies
     */
    protected ArrayList<CmsWebdavRange> parseRange(HttpServletRequest request, HttpServletResponse response, I_CmsRepositoryItem item) {

        // Checking If-Range
        String headerValue = request.getHeader(HEADER_IFRANGE);
        if (headerValue != null) {
            long headerValueTime = (-1L);
            try {
                // If-Range may carry either a date or an ETag; -1 means "not a date".
                headerValueTime = request.getDateHeader(HEADER_IFRANGE);
            } catch (Exception e) {
                // noop
            }
            String eTag = getETag(item);
            long lastModified = item.getLastModifiedDate();
            if (headerValueTime == (-1L)) {
                // If the ETag the client gave does not match the entity
                // etag, then the entire entity is returned.
                if (!eTag.equals(headerValue.trim())) {
                    return FULL_RANGE;
                }
            } else {
                // If the timestamp of the entity the client got is older than
                // the last modification date of the entity, the entire entity
                // is returned. The extra second absorbs sub-second precision loss
                // in HTTP dates.
                if (lastModified > (headerValueTime + 1000)) {
                    return FULL_RANGE;
                }
            }
        }

        long fileLength = item.getContentLength();
        if (fileLength == 0) {
            return null;
        }

        // Retrieving the range header (if any is specified)
        String rangeHeader = request.getHeader(HEADER_RANGE);
        if (rangeHeader == null) {
            return null;
        }

        // bytes is the only range unit supported (and I don't see the point
        // of adding new ones).
        if (!rangeHeader.startsWith("bytes")) {
            response.addHeader(HEADER_CONTENTRANGE, "bytes */" + fileLength);
            response.setStatus(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
            return null;
        }

        // Skip the "bytes=" prefix (6 characters).
        rangeHeader = rangeHeader.substring(6);

        // List which will contain all the ranges which are successfully parsed.
        ArrayList<CmsWebdavRange> result = new ArrayList<CmsWebdavRange>();
        StringTokenizer commaTokenizer = new StringTokenizer(rangeHeader, ",");

        // Parsing the range list
        while (commaTokenizer.hasMoreTokens()) {
            String rangeDefinition = commaTokenizer.nextToken().trim();

            CmsWebdavRange currentRange = new CmsWebdavRange();
            currentRange.setLength(fileLength);

            int dashPos = rangeDefinition.indexOf('-');
            if (dashPos == -1) {
                response.addHeader(HEADER_CONTENTRANGE, "bytes */" + fileLength);
                response.setStatus(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
                return null;
            }

            if (dashPos == 0) {
                // Suffix range "-N": the whole token parses as a negative number,
                // so start = fileLength + offset serves the last N bytes.
                try {
                    long offset = Long.parseLong(rangeDefinition);
                    currentRange.setStart(fileLength + offset);
                    currentRange.setEnd(fileLength - 1);
                } catch (NumberFormatException e) {
                    response.addHeader(HEADER_CONTENTRANGE, "bytes */" + fileLength);
                    response.setStatus(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
                    return null;
                }
            } else {
                // "start-end" or open-ended "start-" range.
                try {
                    currentRange.setStart(Long.parseLong(rangeDefinition.substring(0, dashPos)));
                    if (dashPos < (rangeDefinition.length() - 1)) {
                        currentRange.setEnd(Long.parseLong(rangeDefinition.substring(dashPos + 1, rangeDefinition.length())));
                    } else {
                        // No end given: range runs to the last byte.
                        currentRange.setEnd(fileLength - 1);
                    }
                } catch (NumberFormatException e) {
                    response.addHeader(HEADER_CONTENTRANGE, "bytes */" + fileLength);
                    response.setStatus(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
                    return null;
                }
            }

            if (!currentRange.validate()) {
                response.addHeader(HEADER_CONTENTRANGE, "bytes */" + fileLength);
                response.setStatus(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
                return null;
            }

            result.add(currentRange);
        }

        return result;
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the {@link EEnum} for IfcEvaporatorTypeEnum, resolving it lazily
     * from the registered Ifc2x3tc1 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcEvaporatorTypeEnum() {
        if (ifcEvaporatorTypeEnumEEnum == null) {
            // Classifier index 835 is fixed by the generated package layout —
            // do not change it by hand.
            ifcEvaporatorTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(835);
        }
        return ifcEvaporatorTypeEnumEEnum;
    }
}
public class MZXMLMultiSpectraParser {
    /**
     * Intended use: find the length of the last scan entry in the file. MzXML might contain
     * chromatograms after scans, and the index only contains the offset of the last scan - there is
     * no easy way to figure out what the length is. If you just consider, that the length is from the
     * offset to the beginning of the index, then you might end up reading several hundred Mb of
     * chromatogram data. To use this method properly, you should create a File-based stream
     * (buffered), starting at the offset of the last scan and this method will read the file until it
     * finds the corresponding closing tag.
     *
     * @return The length of the first scan entry in this stream, or, more precisely, the offset in
     * the stream of the end of the closing tag of the first 'scan' tag. Or -1 if no matching pair of
     * 'scan' tags was found.
     */
    int findThisStreamFirstScanLen() throws FileParsingException {
        int length = -1;
        numOpeningScanTagsFound = 0;
        XMLStreamReaderImpl reader = null;
        try {
            // Reuse a pooled reader when a pool is available; otherwise allocate one.
            reader = (readerPool == null) ? new XMLStreamReaderImpl() : readerPool.borrowObject();
            reader.setInput(is, StandardCharsets.UTF_8.name());
            LogHelper.setJavolutionLogLevelFatal();
            int eventType = XMLStreamConstants.END_DOCUMENT;
            CharArray localName;
            do {
                // Read the next XML element
                try {
                    eventType = reader.next();
                } catch (XMLStreamException e) {
                    if (e instanceof XMLUnexpectedEndTagException) {
                        // A stray closing tag is tolerated — keep scanning.
                        continue;
                    }
                    if (e instanceof XMLUnexpectedEndOfDocumentException) {
                        // this happens when we have nested <scan> tags and we are
                        // not parsing the whole MS1/MS2 child pairs
                        return length;
                    }
                    throw new FileParsingException(e);
                }

                // Process the read event
                switch (eventType) {
                    case XMLStreamConstants.START_ELEMENT:
                        localName = reader.getLocalName();
                        if (localName.contentEquals(TAG.SCAN.name)) {
                            // Count nesting so we only stop at the matching close
                            // of the FIRST scan tag.
                            numOpeningScanTagsFound += 1;
                            break;
                        }
                        break;
                    case XMLStreamConstants.END_ELEMENT:
                        localName = reader.getLocalName();
                        if (localName.contentEquals(TAG.SCAN.name)) {
                            if (numOpeningScanTagsFound == 1) {
                                final XMLStreamReaderImpl.LocationImpl loc = reader.getLocation();
                                // Offset of the end of the closing tag, adjusted for a BOM.
                                length = loc.getCharacterOffset() + loc.getBomLength();
                                return length;
                            }
                        }
                        break;
                }
            } while (eventType != XMLStreamConstants.END_DOCUMENT);
        } catch (Exception e) {
            throw new FileParsingException(e);
        } finally {
            // we need to return the reader to the pool, if we borrowed it from there
            if (readerPool != null && reader != null) {
                try {
                    readerPool.returnObject(reader);
                } catch (Exception e) {
                    throw new FileParsingException(e);
                }
            }
        }
        return length;
    }
}
public class DynamicOutputBuffer { /** * Tries to copy as much bytes as possible from this buffer to * the given channel . This method always copies whole internal * buffers and deallocates them afterwards . It does not deallocate * the buffer the write position is currently pointing to nor does * it deallocate buffers following the write position . The method * increases an internal pointer so consecutive calls also copy * consecutive bytes . * @ param out the channel to write to * @ throws IOException if the buffer could not be flushed */ public void flushTo ( WritableByteChannel out ) throws IOException { } }
int n1 = _flushPosition / _bufferSize ; int n2 = _position / _bufferSize ; while ( n1 < n2 ) { ByteBuffer bb = _buffers . get ( n1 ) ; bb . rewind ( ) ; out . write ( bb ) ; deallocateBuffer ( n1 ) ; _flushPosition += _bufferSize ; ++ n1 ; }
public class LuceneQueryBuilder { public Object visit ( QueryRootNode node , Object data ) throws RepositoryException { } }
BooleanQuery root = new BooleanQuery ( ) ; Query wrapped = root ; if ( node . getLocationNode ( ) != null ) { wrapped = ( Query ) node . getLocationNode ( ) . accept ( this , root ) ; } return wrapped ;
public class Cursor { /** * Determines whether the input { @ link IoDevice } is compatible with the { @ link Cursor } . * @ return true if device is compatible , else false */ public boolean isDeviceCompatible ( final IoDevice device ) { } }
List < IoDevice > ioDevices = new LinkedList < IoDevice > ( ) ; for ( PriorityIoDeviceTuple tuple : mCompatibleDevices ) { if ( tuple . getIoDevice ( ) . equals ( device ) ) { return true ; } } return false ;
public class DBagImpl { /** * This method returns the number of occurrences of the object < code > obj < / code > * in the < code > DBag < / code > collection . * @ param obj The value that may have elements in the collection . * @ return The number of occurrences of < code > obj < / code > in this collection . */ public int occurrences ( Object obj ) { } }
int count = 0 ; for ( int i = 0 ; i < this . size ( ) ; i ++ ) { if ( ( obj == null ) ? this . get ( i ) == null : this . get ( i ) . equals ( obj ) ) { count ++ ; } } return count ;
public class CacheFilter {
    /**
     * Check whether the URI starts with one of the given prefixes.
     * Empty prefixes are skipped and never match.
     *
     * @param uri URI
     * @param patterns possible prefixes
     * @return true when the URI starts with at least one non-empty prefix
     */
    public boolean checkPrefixes(String uri, String[] patterns) {
        for (final String prefix : patterns) {
            if (prefix.length() > 0 && uri.startsWith(prefix)) {
                return true;
            }
        }
        return false;
    }
}
public class CssEscape {
    /**
     * Perform a (configurable) CSS Identifier <strong>escape</strong> operation on a <tt>String</tt> input,
     * writing the results to a <tt>Writer</tt>.
     *
     * This method will perform an escape operation according to the specified
     * {@link CssIdentifierEscapeType} and {@link CssIdentifierEscapeLevel} argument values.
     *
     * All other <tt>String</tt>/<tt>Writer</tt>-based <tt>escapeCssIdentifier(...)</tt> methods call
     * this one with preconfigured <tt>type</tt> and <tt>level</tt> values.
     *
     * This method is <strong>thread-safe</strong>.
     *
     * @param text the <tt>String</tt> to be escaped. A <tt>null</tt> input results in nothing
     *             being written to the writer.
     * @param writer the <tt>java.io.Writer</tt> to which the escaped result will be written.
     * @param type the type of escape operation to be performed, see {@link CssIdentifierEscapeType}.
     * @param level the escape level to be applied, see {@link CssIdentifierEscapeLevel}.
     * @throws IOException if an input/output exception occurs
     * @since 1.1.2
     */
    public static void escapeCssIdentifier(final String text, final Writer writer, final CssIdentifierEscapeType type, final CssIdentifierEscapeLevel level) throws IOException {
        // Validate arguments up-front; note the check order (writer, type,
        // level) is observable through which exception is thrown first.
        if (writer == null) {
            throw new IllegalArgumentException("Argument 'writer' cannot be null");
        }
        if (type == null) {
            throw new IllegalArgumentException("The 'type' argument cannot be null");
        }
        if (level == null) {
            throw new IllegalArgumentException("The 'level' argument cannot be null");
        }
        // Delegate the actual escaping; a null text yields a no-op reader.
        CssIdentifierEscapeUtil.escape(new InternalStringReader(text), writer, type, level);
    }
}
public class ArrayUtil { /** * 映射键值 ( 参考Python的zip ( ) 函数 ) < br > * 例如 : < br > * keys = [ a , b , c , d ] < br > * values = [ 1,2,3,4 ] < br > * 则得到的Map是 { a = 1 , b = 2 , c = 3 , d = 4 } < br > * 如果两个数组长度不同 , 则只对应最短部分 * @ param < K > Key类型 * @ param < V > Value类型 * @ param keys 键列表 * @ param values 值列表 * @ param isOrder 是否有序 * @ return Map * @ since 3.0.4 */ public static < K , V > Map < K , V > zip ( K [ ] keys , V [ ] values , boolean isOrder ) { } }
if ( isEmpty ( keys ) || isEmpty ( values ) ) { return null ; } final int size = Math . min ( keys . length , values . length ) ; final Map < K , V > map = CollectionUtil . newHashMap ( size , isOrder ) ; for ( int i = 0 ; i < size ; i ++ ) { map . put ( keys [ i ] , values [ i ] ) ; } return map ;
public class AuthorizationImpl {
    /**
     * Answers if the principal has permission to SUBSCRIBE to this Channel.
     *
     * @param principal IAuthorizationPrincipal
     * @param portletDefinitionId id of the portlet definition to check
     * @return boolean true when the principal may subscribe; false when the
     *         portlet does not exist or the permission test fails
     * @exception AuthorizationException indicates authorization information
     *            could not be retrieved, or the portlet is in an unrecognized
     *            lifecycle state
     */
    @Override
    @RequestCache
    public boolean canPrincipalSubscribe(IAuthorizationPrincipal principal, String portletDefinitionId) {
        String owner = IPermission.PORTAL_SUBSCRIBE;

        // retrieve the indicated channel from the channel registry store and
        // determine its current lifecycle state
        IPortletDefinition portlet = this.portletDefinitionRegistry.getPortletDefinition(portletDefinitionId);
        if (portlet == null) {
            // Unknown portlet: nobody can subscribe.
            return false;
        }

        String target = PermissionHelper.permissionTargetIdForPortletDefinition(portlet);
        PortletLifecycleState state = portlet.getLifecycleState();

        /*
         * Each channel lifecycle state now has its own subscribe permission. The
         * following logic checks the appropriate permission for the lifecycle.
         */
        String permission;
        if (state.equals(PortletLifecycleState.PUBLISHED) || state.equals(PortletLifecycleState.MAINTENANCE)) {
            // NB: There is no separate SUBSCRIBE permission for MAINTENANCE
            // mode; everyone simply sees the 'out of service' message
            permission = IPermission.PORTLET_SUBSCRIBER_ACTIVITY;
        } else if (state.equals(PortletLifecycleState.APPROVED)) {
            permission = IPermission.PORTLET_SUBSCRIBER_APPROVED_ACTIVITY;
        } else if (state.equals(PortletLifecycleState.CREATED)) {
            permission = IPermission.PORTLET_SUBSCRIBER_CREATED_ACTIVITY;
        } else if (state.equals(PortletLifecycleState.EXPIRED)) {
            permission = IPermission.PORTLET_SUBSCRIBER_EXPIRED_ACTIVITY;
        } else {
            // A state this mapping does not know about is a configuration error.
            throw new AuthorizationException("Unrecognized lifecycle state for channel " + portletDefinitionId);
        }

        // Test the appropriate permission.
        return doesPrincipalHavePermission(principal, owner, permission, target);
    }
}
public class ConvertKit {
    /**
     * Converts a binary-digit string into its byte representation.
     * When the length is not a multiple of 8, the string is padded with
     * leading zeros before conversion.
     *
     * @param bits a string of '0'/'1' characters
     * @return the corresponding bytes (big-endian bit order within each byte)
     */
    public static byte[] bits2Bytes(String bits) {
        // Left-pad with zeros so the length becomes a multiple of 8.
        final int remainder = bits.length() % 8;
        String padded = bits;
        if (remainder != 0) {
            final StringBuilder prefix = new StringBuilder();
            for (int k = remainder; k < 8; k++) {
                prefix.append('0');
            }
            padded = prefix.append(bits).toString();
        }
        final byte[] result = new byte[padded.length() / 8];
        for (int i = 0; i < result.length; i++) {
            byte current = 0;
            for (int j = 0; j < 8; j++) {
                // Shift in each bit, most-significant first.
                current = (byte) ((current << 1) | (padded.charAt(i * 8 + j) - '0'));
            }
            result[i] = current;
        }
        return result;
    }
}
public class MarketplaceAgreementsInner {
    /**
     * Sign marketplace terms.
     *
     * @param publisherId Publisher identifier string of image being deployed.
     * @param offerId Offer identifier string of image being deployed.
     * @param planId Plan identifier string of image being deployed.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<AgreementTermsInner> signAsync(String publisherId, String offerId, String planId, final ServiceCallback<AgreementTermsInner> serviceCallback) {
        // Thin adapter: delegate to the response-bearing async variant and
        // bridge its observable to a ServiceFuture/callback pair.
        return ServiceFuture.fromResponse(signWithServiceResponseAsync(publisherId, offerId, planId), serviceCallback);
    }
}
public class ClassProject {
    /**
     * Creates the field for the given sequence number in this record's field
     * sequence. Sequence numbers with commented-out entries below (0-2, 5-7)
     * are handled by the superclass fallback at the end.
     *
     * @param iFieldSeq position of the field in the record's field sequence
     * @return the newly created field, or the superclass's field when the
     *         sequence number is not handled here
     */
    public BaseField setupField(int iFieldSeq) {
        BaseField field = null;
        // if (iFieldSeq == 0)
        //     field = new CounterField(this, ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        //     field.setHidden(true);
        // if (iFieldSeq == 1)
        //     field = new RecordChangedField(this, LAST_CHANGED, Constants.DEFAULT_FIELD_LENGTH, null, null);
        //     field.setHidden(true);
        // if (iFieldSeq == 2)
        //     field = new BooleanField(this, DELETED, Constants.DEFAULT_FIELD_LENGTH, null, new Boolean(false));
        //     field.setHidden(true);
        if (iFieldSeq == 3) {
            // NAME is the only mandatory field created here.
            field = new StringField(this, NAME, 100, null, null);
            field.setNullable(false);
        }
        if (iFieldSeq == 4)
            field = new ClassProjectField(this, PARENT_FOLDER_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        // if (iFieldSeq == 5)
        //     field = new ShortField(this, SEQUENCE, Constants.DEFAULT_FIELD_LENGTH, null, null);
        // if (iFieldSeq == 6)
        //     field = new MemoField(this, COMMENT, Constants.DEFAULT_FIELD_LENGTH, null, null);
        // if (iFieldSeq == 7)
        //     field = new StringField(this, CODE, 30, null, null);
        if (iFieldSeq == 8)
            field = new StringField(this, DESCRIPTION, 100, null, null);
        if (iFieldSeq == 9)
            field = new BooleanField(this, SYSTEM_CLASSES, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 10)
            field = new StringField(this, PACKAGE_NAME, 100, null, null);
        if (iFieldSeq == 11)
            field = new StringField(this, PROJECT_PATH, 100, null, null);
        if (iFieldSeq == 12)
            field = new StringField(this, INTERFACE_PACKAGE, 100, null, null);
        if (iFieldSeq == 13)
            field = new StringField(this, INTERFACE_PROJECT_PATH, 100, null, null);
        if (iFieldSeq == 14)
            field = new StringField(this, THIN_PACKAGE, 100, null, null);
        if (iFieldSeq == 15)
            field = new StringField(this, THIN_PROJECT_PATH, 100, null, null);
        if (iFieldSeq == 16)
            field = new StringField(this, RESOURCE_PACKAGE, 100, null, null);
        if (iFieldSeq == 17)
            field = new StringField(this, RES_PROJECT_PATH, 100, null, null);
        if (iFieldSeq == 18)
            field = new PropertiesField(this, PROPERTIES, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 19)
            field = new StringField(this, ARTIFACT_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        if (iFieldSeq == 20)
            field = new StringField(this, GROUP_ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
        // Anything not handled above (including the commented-out base fields)
        // falls through to the superclass.
        if (field == null)
            field = super.setupField(iFieldSeq);
        return field;
    }
}
public class JawrBinaryResourceRequestHandler {
    /**
     * Process the request: validates the bundle hashcode (in strict mode),
     * handles browser revalidation headers, sets caching headers, and streams
     * the binary resource content or an error status.
     *
     * @param requestedPath the requested path
     * @param request the request
     * @param response the response
     * @param bundleHashcodeType the bundle hashcode type
     * @throws IOException if an IOException occurs
     */
    @Override
    protected void processRequest(String requestedPath, HttpServletRequest request, HttpServletResponse response, BundleHashcodeType bundleHashcodeType) throws IOException {

        boolean responseHeaderWritten = false;
        // In strict (non-debug) mode, an invalid hashcode marks the bundle invalid.
        boolean validBundle = true;
        if (!jawrConfig.isDebugModeOn() && jawrConfig.isStrictMode()
                && bundleHashcodeType.equals(BundleHashcodeType.INVALID_HASHCODE)) {
            validBundle = false;
        }

        // If debug mode is off, check for If-Modified-Since and
        // If-none-match headers and set response caching headers.
        if (!this.jawrConfig.isDebugModeOn()) {
            // If a browser checks for changes, always respond 'no changes'.
            if (validBundle && (null != request.getHeader(IF_MODIFIED_SINCE_HEADER)
                    || null != request.getHeader(IF_NONE_MATCH_HEADER))) {
                response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
                if (LOGGER.isDebugEnabled())
                    LOGGER.debug("Returning 'not modified' header. ");
                return;
            }

            if (validBundle) {
                // Add caching headers
                setResponseHeaders(response);
            } else {
                // Let the illegal-bundle handler decide the headers first;
                // fall back to standard caching headers if it declined.
                responseHeaderWritten = illegalBundleRequestHandler.writeResponseHeader(requestedPath, request, response);
                if (!responseHeaderWritten) {
                    // Add caching headers
                    setResponseHeaders(response);
                }
            }
        }

        // Returns the real file path
        String filePath = getRealFilePath(requestedPath, bundleHashcodeType);

        try {
            if (isValidRequestedPath(filePath)
                    && (validBundle || illegalBundleRequestHandler.canWriteContent(requestedPath, request))) {
                // Set the content type
                response.setContentType(getContentType(requestedPath, request));
                writeContent(filePath, request, response);
            } else {
                if (!responseHeaderWritten) {
                    LOGGER.error("Unable to load the image for the request URI : " + request.getRequestURI());
                    response.sendError(HttpServletResponse.SC_NOT_FOUND);
                }
            }
        } catch (EOFException eofex) {
            // Client closed the connection mid-stream; not a server error.
            LOGGER.info("Browser cut off response", eofex);
        } catch (ResourceNotFoundException e) {
            LOGGER.info("Unable to write resource " + request.getRequestURI(), e);
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
        } catch (IOException ex) {
            LOGGER.error("Unable to write resource " + request.getRequestURI(), ex);
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
        }

        if (LOGGER.isDebugEnabled())
            LOGGER.debug("request succesfully attended");
    }
}
public class ClassLoaderLeakPreventorFactory {
    /**
     * Removes the {@link PreClassLoaderInitiator} registered under the given
     * class, if any. (The previous doc incorrectly described this as a getter.)
     *
     * @param clazz the initiator class whose registered instance should be removed
     */
    public <C extends PreClassLoaderInitiator> void removePreInitiator(Class<C> clazz) {
        // Initiators are keyed by fully-qualified class name.
        this.preInitiators.remove(clazz.getName());
    }
}
public class DBManagerService {
    /**
     * Compares the creation time stamps of two tenant definitions, allowing
     * for either property to be null for older definitions.
     *
     * NOTE(review): both operands of the {@code &&} compare CREATED_ON_PROP
     * against CREATED_ON_PROP — the second clause duplicates the first, so the
     * expression degenerates to a single comparison. The second clause likely
     * should compare a different property (e.g. a modified/updated timestamp);
     * confirm the intended property before changing.
     *
     * @param tenantDef1 first tenant definition
     * @param tenantDef2 second tenant definition
     * @return true when the compared time stamp properties are equal (null-safe)
     */
    private static boolean sameTenantDefs(TenantDefinition tenantDef1, TenantDefinition tenantDef2) {
        return isEqual(tenantDef1.getProperty(TenantService.CREATED_ON_PROP),
                       tenantDef2.getProperty(TenantService.CREATED_ON_PROP))
            && isEqual(tenantDef1.getProperty(TenantService.CREATED_ON_PROP),
                       tenantDef2.getProperty(TenantService.CREATED_ON_PROP));
    }
}