signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class IndexWriter { /** * Generates one or more { @ link AttributeUpdate } s that will create or update the necessary Table Buckets entries * in the Segment ' s Extended Attributes . * @ param bucket The Bucket to create or update . * @ param bucketOffset The Bucket ' s new offset . * @ param update A { @ link UpdateInstructions } object to collect updates into . */ private void generateBucketUpdate ( TableBucket bucket , long bucketOffset , UpdateInstructions update ) { } }
assert bucketOffset >= 0 ; update . withAttribute ( new AttributeUpdate ( bucket . getHash ( ) , AttributeUpdateType . Replace , bucketOffset ) ) ; if ( ! bucket . exists ( ) ) { update . bucketAdded ( ) ; }
public class NodeRepository { /** * Updates the supplied node record , inserting it into the database if necessary . */ public void updateNode ( NodeRecord record ) { } }
record . lastUpdated = new Timestamp ( System . currentTimeMillis ( ) ) ; store ( record ) ;
public class TimeParameter {
    /**
     * Write Time parameter to outputStream.
     *
     * @param pos the stream to write to
     * @throws IOException if writing to the stream fails
     */
    public void writeTo(final PacketOutputStream pos) throws IOException {
        // Format the time-of-day portion (HH:mm:ss) in the configured time zone.
        SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss");
        sdf.setTimeZone(timeZone);
        String dateString = sdf.format(time);
        pos.write(QUOTE);
        // NOTE(review): getBytes() uses the platform default charset; harmless here because
        // "HH:mm:ss" output is ASCII-only, but an explicit charset would be more robust.
        pos.write(dateString.getBytes());
        // "microseconds" is derived from the millisecond remainder of getTime(), so the effective
        // precision is milliseconds (the last three of the six fractional digits are always zero).
        // NOTE(review): for a time before the epoch, getTime() % 1000 can be negative, which would
        // suppress the fractional part entirely — confirm pre-1970 values cannot occur here.
        int microseconds = (int) (time.getTime() % 1000) * 1000;
        if (microseconds > 0 && fractionalSeconds) {
            pos.write('.');
            // Emit fractional digits most-significant first; the loop stops once the remainder
            // reaches zero, so trailing zeros are omitted (e.g. 120000 -> ".12").
            int factor = 100000;
            while (microseconds > 0) {
                int dig = microseconds / factor;
                pos.write('0' + dig);
                microseconds -= dig * factor;
                factor /= 10;
            }
        }
        pos.write(QUOTE);
    }
}
public class FeedbackController { /** * Formats a MolgenisUser ' s name . * @ return String containing the user ' s first name , middle names and last name . */ private static String getFormattedName ( User user ) { } }
List < String > parts = new ArrayList < > ( ) ; if ( user . getTitle ( ) != null ) { parts . add ( user . getTitle ( ) ) ; } if ( user . getFirstName ( ) != null ) { parts . add ( user . getFirstName ( ) ) ; } if ( user . getMiddleNames ( ) != null ) { parts . add ( user . getMiddleNames ( ) ) ; } if ( user . getLastName ( ) != null ) { parts . add ( user . getLastName ( ) ) ; } if ( parts . isEmpty ( ) ) { return null ; } else { return StringUtils . collectionToDelimitedString ( parts , " " ) ; }
public class TransformationSummary {
    /**
     * <code>repeated .google.privacy.dlp.v2.TransformationSummary.SummaryResult results = 4;</code>
     *
     * @param index zero-based position within the repeated {@code results} field
     * @return a read-only view of the result at the given position
     */
    public com.google.privacy.dlp.v2.TransformationSummary.SummaryResultOrBuilder getResultsOrBuilder(int index) {
        // Generated protobuf accessor: delegates directly to the backing list.
        return results_.get(index);
    }
}
public class JvmSpecializedTypeReferenceImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // EMF-generated unset: reset the feature declared by this class; defer everything else
        // to the superclass.
        switch (featureID) {
            case TypesPackage.JVM_SPECIALIZED_TYPE_REFERENCE__EQUIVALENT:
                // Unsetting resets the reference to its default value (null).
                setEquivalent((JvmTypeReference) null);
                return;
        }
        super.eUnset(featureID);
    }
}
public class StandardParameterParser { /** * Converts the given { @ code List } of parameters to a { @ code Map } . * The names of parameters are used as keys ( mapping to corresponding value ) thus removing any duplicated parameters . It is * used an empty { @ code String } for the mapping , if the parameter has no value ( { @ code null } ) . * @ param parameters the { @ code List } to be converted , must not be { @ code null } * @ return a { @ code Map } containing the parameters */ private static Map < String , String > convertParametersList ( List < NameValuePair > parameters ) { } }
Map < String , String > map = new HashMap < > ( ) ; for ( NameValuePair parameter : parameters ) { String value = parameter . getValue ( ) ; if ( value == null ) { value = "" ; } map . put ( parameter . getName ( ) , value ) ; } return map ;
public class ServiceContext {
    /**
     * Cleanup at the end of a request.
     */
    public static void end() {
        ServiceContext context = (ServiceContext) _localContext.get();
        // The context is reference-counted (_count) to support nested begin/end pairs on the same
        // thread; only the outermost end() — the one that drops the count to zero — tears it down.
        if (context != null && --context._count == 0) {
            // Release request-scoped state so it cannot leak across requests on a pooled thread.
            context._request = null;
            context._response = null;
            context._headers.clear();
            _localContext.set(null);
        }
    }
}
public class Base64 {
    /**
     * Serializes an object and returns the Base64-encoded version of that serialized object.
     *
     * As of v2.3, if the object cannot be serialized or there is another error, the method will
     * throw an IOException. <b>This is new to v2.3!</b> In earlier versions, it just returned a
     * null value, but in retrospect that's a pretty poor way to handle it.
     *
     * The object is not GZip-compressed before being encoded.
     *
     * Example options:
     * <pre>
     *   GZIP: gzip-compresses object before encoding it.
     *   DO_BREAK_LINES: break lines at 76 characters
     * </pre>
     * Example: <code>encodeObject(myObj, Base64.GZIP)</code> or
     * Example: <code>encodeObject(myObj, Base64.GZIP | Base64.DO_BREAK_LINES)</code>
     *
     * @param aSerializableObject The object to encode
     * @param nOptions Specified options
     * @return The Base64-encoded object
     * @see Base64#GZIP
     * @see Base64#DO_BREAK_LINES
     * @throws IOException if there is an error
     * @since 2.0
     */
    @Nonnull
    public static String encodeObject(@Nonnull final Serializable aSerializableObject, final int nOptions) throws IOException {
        ValueEnforcer.notNull(aSerializableObject, "Object");

        // Stream pipeline: ObjectOutputStream -> (GZIP) -> Base64 -> ByteArrayOutputStream
        final NonBlockingByteArrayOutputStream baos = new NonBlockingByteArrayOutputStream();
        // try-with-resources guarantees the Base64 stream is flushed/closed before baos is read.
        try (final Base64OutputStream b64os = new Base64OutputStream(baos, ENCODE | nOptions)) {
            if ((nOptions & GZIP) != 0) {
                // Gzip: compress the serialized form before Base64-encoding it.
                try (GZIPOutputStream gzos = new GZIPOutputStream(b64os);
                     ObjectOutputStream oos = new ObjectOutputStream(gzos)) {
                    oos.writeObject(aSerializableObject);
                }
            } else {
                // Not gzipped: serialize straight into the Base64 encoder.
                try (ObjectOutputStream oos = new ObjectOutputStream(b64os)) {
                    oos.writeObject(aSerializableObject);
                }
            }
        }

        // Return value according to relevant encoding.
        return baos.getAsString(PREFERRED_ENCODING);
    }
}
public class DistributionBeanQuery {
    /**
     * Load all the Distribution set.
     *
     * @param startIndex as page start
     * @param count as total data
     * @return the loaded page converted to {@link ProxyDistribution} view beans
     */
    @Override
    protected List<ProxyDistribution> loadBeans(final int startIndex, final int count) {
        Page<DistributionSet> distBeans;
        final List<ProxyDistribution> proxyDistributions = new ArrayList<>();
        if (startIndex == 0 && firstPageDistributionSets != null) {
            // First page was pre-fetched elsewhere; reuse it instead of querying again.
            distBeans = firstPageDistributionSets;
        } else if (pinnedTarget != null) {
            // A target is pinned: order results so sets assigned/installed on it come first.
            final DistributionSetFilterBuilder distributionSetFilterBuilder = new DistributionSetFilterBuilder()
                    .setIsDeleted(false).setIsComplete(true).setSearchText(searchText)
                    .setSelectDSWithNoTag(noTagClicked).setTagNames(distributionTags);
            distBeans = getDistributionSetManagement().findByFilterAndAssignedInstalledDsOrderedByLinkTarget(
                    new OffsetBasedPageRequest(startIndex, count, sort), distributionSetFilterBuilder,
                    pinnedTarget.getControllerId());
        } else if (distributionTags.isEmpty() && StringUtils.isEmpty(searchText) && !noTagClicked) {
            // if no search filters available: plain query for complete distribution sets.
            distBeans = getDistributionSetManagement().findByCompleted(
                    new OffsetBasedPageRequest(startIndex, count, sort), true);
        } else {
            // Text/tag filters active: build a full filter and query with it.
            final DistributionSetFilter distributionSetFilter = new DistributionSetFilterBuilder()
                    .setIsDeleted(false).setIsComplete(true).setSearchText(searchText)
                    .setSelectDSWithNoTag(noTagClicked).setTagNames(distributionTags).build();
            distBeans = getDistributionSetManagement().findByDistributionSetFilter(
                    new OffsetBasedPageRequest(startIndex, count, sort), distributionSetFilter);
        }
        // Map each persisted DistributionSet to the UI proxy bean.
        for (final DistributionSet distributionSet : distBeans) {
            final ProxyDistribution proxyDistribution = new ProxyDistribution();
            proxyDistribution.setName(distributionSet.getName());
            proxyDistribution.setDescription(distributionSet.getDescription());
            // Both id and distId carry the same entity id.
            proxyDistribution.setId(distributionSet.getId());
            proxyDistribution.setDistId(distributionSet.getId());
            proxyDistribution.setVersion(distributionSet.getVersion());
            proxyDistribution.setCreatedDate(SPDateTimeUtil.getFormattedDate(distributionSet.getCreatedAt()));
            proxyDistribution.setLastModifiedDate(SPDateTimeUtil.getFormattedDate(distributionSet.getLastModifiedAt()));
            proxyDistribution.setCreatedByUser(UserDetailsFormatter.loadAndFormatCreatedBy(distributionSet));
            proxyDistribution.setModifiedByUser(UserDetailsFormatter.loadAndFormatLastModifiedBy(distributionSet));
            proxyDistribution.setNameVersion(HawkbitCommonUtil.getFormattedNameVersion(
                    distributionSet.getName(), distributionSet.getVersion()));
            proxyDistributions.add(proxyDistribution);
        }
        return proxyDistributions;
    }
}
public class LabsInner { /** * Register to managed lab . * @ param resourceGroupName The name of the resource group . * @ param labAccountName The name of the lab Account . * @ param labName The name of the lab . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponse } object if successful . */ public Observable < Void > registerAsync ( String resourceGroupName , String labAccountName , String labName ) { } }
return registerWithServiceResponseAsync ( resourceGroupName , labAccountName , labName ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ;
public class RaftServiceManager {
    /**
     * Applies an open session entry to the state machine.
     *
     * @param entry the committed OpenSessionEntry, whose log index is reused as both the
     *              primitive id and the new session id
     * @return the result of opening the session on the service
     * @throws RaftException.UnknownService if the entry names a service type that cannot be resolved
     */
    private long applyOpenSession(Indexed<OpenSessionEntry> entry) {
        PrimitiveType primitiveType = raft.getPrimitiveTypes().getPrimitiveType(entry.entry().serviceType());

        // Get the state machine executor or create one if it doesn't already exist.
        RaftServiceContext service = getOrInitializeService(
                PrimitiveId.from(entry.index()),
                primitiveType,
                entry.entry().serviceName(),
                entry.entry().serviceConfig());
        if (service == null) {
            throw new RaftException.UnknownService("Unknown service type " + entry.entry().serviceType());
        }

        // The session id is derived from the log index of this entry, which is unique and
        // monotonically increasing across the cluster.
        SessionId sessionId = SessionId.from(entry.index());
        RaftSession session = raft.getSessions().addSession(new RaftSession(
                sessionId,
                MemberId.from(entry.entry().memberId()),
                entry.entry().serviceName(),
                primitiveType,
                entry.entry().readConsistency(),
                entry.entry().minTimeout(),
                entry.entry().maxTimeout(),
                entry.entry().timestamp(),
                service.serializer(),
                service,
                raft,
                threadContextFactory));
        return service.openSession(entry.index(), entry.entry().timestamp(), session);
    }
}
public class CmsJspTagEdit { /** * Returns the resource to edit according to the uuid provided via the tag ' s attribute " uuid " . < p > * @ param cms the CMS context * @ return the resource */ private CmsResource getResourceToEdit ( CmsObject cms ) { } }
CmsResource resource = null ; if ( m_uuid != null ) { try { CmsUUID uuid = new CmsUUID ( m_uuid ) ; resource = cms . readResource ( uuid , CmsResourceFilter . ignoreExpirationOffline ( cms ) ) ; } catch ( NumberFormatException | CmsException e ) { LOG . warn ( "UUID was not valid or there is no resource with the given UUID." , e ) ; } } return resource ;
public class OperatorIntersectionCursor {
    /**
     * Prepares the result vector to contain one (possibly empty) geometry per requested dimension,
     * as indicated with the dimensionMask: bit 1 -> MultiPoint, bit 2 -> Polyline, bit 4 -> Polygon.
     * Slots for unrequested dimensions are removed by shifting the remaining entries left.
     *
     * @param descr         vertex description used when creating empty placeholder geometries
     * @param dimensionMask bitmask of requested result dimensions (1: point, 2: line, 4: area)
     * @param res_vec       candidate results, one slot per dimension; modified in place
     * @return a cursor over the requested geometries only
     */
    GeometryCursor prepareVector_(VertexDescription descr, int dimensionMask, Geometry[] res_vec) {
        // inext tracks how many requested slots have been filled so far.
        int inext = 0;
        if ((dimensionMask & 1) != 0) {
            // Points requested: fill an empty MultiPoint if no result was produced.
            if (res_vec[0] == null)
                res_vec[0] = new MultiPoint(descr);
            inext++;
        } else {
            // Points not requested: shift remaining entries left to drop slot 0.
            for (int i = 0; i < res_vec.length - 1; i++)
                res_vec[i] = res_vec[i + 1];
        }
        if ((dimensionMask & 2) != 0) {
            if (res_vec[inext] == null)
                res_vec[inext] = new Polyline(descr);
            inext++;
        } else {
            for (int i = inext; i < res_vec.length - 1; i++)
                res_vec[i] = res_vec[i + 1];
        }
        if ((dimensionMask & 4) != 0) {
            if (res_vec[inext] == null)
                res_vec[inext] = new Polygon(descr);
            inext++;
        } else {
            for (int i = inext; i < res_vec.length - 1; i++)
                res_vec[i] = res_vec[i + 1];
        }
        if (inext != 3) {
            // Fewer than all three dimensions requested: trim the vector to the filled prefix.
            Geometry[] r = new Geometry[inext];
            for (int i = 0; i < inext; i++)
                r[i] = res_vec[i];
            return new SimpleGeometryCursor(r);
        } else {
            return new SimpleGeometryCursor(res_vec);
        }
    }
}
public class CmsPropertyDefinitionButton {
    /**
     * Opens the dialog for creating new property definitions.<p>
     *
     * Fetches the currently defined properties asynchronously, then shows the
     * property-definition dialog populated with them.
     */
    protected void editPropertyDefinition() {
        CmsRpcAction<ArrayList<String>> action = new CmsRpcAction<ArrayList<String>>() {

            @Override
            public void execute() {
                // Show the wait indicator after 200ms and issue the RPC call.
                start(200, true);
                CmsCoreProvider.getVfsService().getDefinedProperties(this);
            }

            @Override
            protected void onResponse(ArrayList<String> result) {
                stop(false);
                CmsPropertyDefinitionDialog dialog = new CmsPropertyDefinitionDialog(result);
                dialog.center();
                // Notify subclasses/owner when the dialog is dismissed.
                dialog.addCloseHandler(new CloseHandler<PopupPanel>() {

                    public void onClose(CloseEvent<PopupPanel> event) {
                        onClosePropertyDefinitionDialog();
                    }
                });
            }
        };
        action.execute();
    }
}
public class AdminElevatewordAction {

    /**
     * Renders the elevate-word list page: registers the paged list under "elevateWordItems"
     * and re-populates the search form (id only) from the pager.
     */
    private HtmlResponse asListHtml() {
        return asHtml(path_AdminElevateword_AdminElevatewordJsp).renderWith(data -> {
            RenderDataUtil.register(data, "elevateWordItems",
                    elevateWordService.getElevateWordList(elevateWordPager)); // page navi
        }).useForm(SearchForm.class, setup -> {
            setup.setup(form -> {
                // Carry only the id over from the pager into the search form.
                copyBeanToBean(elevateWordPager, form, op -> op.include("id"));
            });
        });
    }
}
public class WebService { /** * method to generate the natural analogue sequence for all * peptide - sequences from an HELM input * @ param notation * input HELM * @ return natural analogue peptide sequences , divided by white spaces * @ throws org . helm . notation2 . parser . exceptionparser . NotationException * if the input complex notation contains non - peptide polymer ( s ) * @ throws HELM2HandledException * if the HELM input contains HELM2 features * @ throws ValidationException * if the input HELM is not valid * @ throws MonomerLoadingException * if the MonomerFactory can not be refreshed * @ throws PeptideUtilsException * if the polymer is not a peptide * @ throws org . helm . notation2 . parser . exceptionparser . NotationException * if notation is not valid * @ throws ChemistryException * if the Chemistry Engine can not be initialized */ public String generateNaturalAnalogSequencePeptide ( String notation ) throws HELM2HandledException , ValidationException , MonomerLoadingException , PeptideUtilsException , org . helm . notation2 . parser . exceptionparser . NotationException , ChemistryException { } }
String result = SequenceConverter . getPeptideNaturalAnalogSequenceFromNotation ( validate ( notation ) ) ; setMonomerFactoryToDefault ( notation ) ; return result ;
public class GoogleCloudStorageFileSystem { /** * Gets information about the given path item . * @ param path The path we want information about . * @ return Information about the given path item . * @ throws IOException */ public FileInfo getFileInfo ( URI path ) throws IOException { } }
logger . atFine ( ) . log ( "getFileInfo(%s)" , path ) ; checkArgument ( path != null , "path must not be null" ) ; // Validate the given path . true = = allow empty object name . // One should be able to get info about top level directory ( = = bucket ) , // therefore we allow object name to be empty . StorageResourceId resourceId = pathCodec . validatePathAndGetId ( path , true ) ; FileInfo fileInfo = FileInfo . fromItemInfo ( pathCodec , getFileInfoInternal ( resourceId , gcs . getOptions ( ) . isInferImplicitDirectoriesEnabled ( ) ) ) ; logger . atFine ( ) . log ( "getFileInfo: %s" , fileInfo ) ; return fileInfo ;
public class DistanceTravelledMovieMaker {
    /**
     * Saves a sequence of image files of Vessel Traffic Density plots to the
     * <code>imageDirectory</code> with filenames map1.png, map2.png, etc.
     *
     * @param options plot options passed through to each image render
     * @param files source files to render
     * @param times frame boundary times; consecutive pairs define each image's time range
     * @param imageDirectory directory the map&lt;N&gt;.png files are written to
     */
    private static void saveImagesWithTimeRange(final Options options, final Observable<File> files,
            Observable<Long> times, final String imageDirectory) {
        // buffer(2, 1) yields sliding pairs [t0,t1], [t1,t2], ... — each pair is one frame's range.
        times.buffer(2, 1).doOnNext(new Action1<List<Long>>() {
            // Sequence number used to name the output files map1.png, map2.png, ...
            AtomicInteger i = new AtomicInteger();

            @Override
            public void call(List<Long> pair) {
                // The final buffer holds only the last timestamp; it does not form a range.
                if (pair.size() < 2)
                    return;
                Long startTime = pair.get(0);
                Long finishTime = pair.get(1);
                saveImageWithTimeRange(options, files, startTime, finishTime,
                        imageDirectory + "/map" + i.incrementAndGet() + ".png");
            }
        }).subscribe(reportErrors());
    }
}
public class DeviceImpl { /** * Get info of this device in IDL1 * @ return info * @ throws DevFailed */ @ Override public DevInfo info ( ) throws DevFailed { } }
MDC . setContextMap ( contextMap ) ; xlogger . entry ( ) ; deviceMonitoring . startRequest ( "Operation info" ) ; final DevInfo info = new DevInfo ( ) ; info . dev_class = className ; info . doc_url = "Doc URL = http://www.tango-controls.org" ; info . server_host = ServerManager . getInstance ( ) . getHostName ( ) ; info . server_id = ServerManager . getInstance ( ) . getServerName ( ) ; info . server_version = SERVER_VERSION ; xlogger . exit ( ) ; return info ;
public class SimpleMMcifConsumer {
    /**
     * The EntityPolySeq object provide the amino acid sequence objects for the Entities.
     * Later on the entities are mapped to the BioJava {@link Chain} and {@link EntityInfo} objects.
     *
     * @param epolseq the EntityPolySeq record for one amino acid
     */
    @Override
    public void newEntityPolySeq(EntityPolySeq epolseq) {
        logger.debug("NEW entity poly seq " + epolseq);

        // Entity ids are strings in mmCIF; -1 marks an unparsable id.
        int eId = -1;
        try {
            eId = Integer.parseInt(epolseq.getEntity_id());
        } catch (NumberFormatException e) {
            logger.warn("Could not parse entity id from EntityPolySeq: " + e.getMessage());
        }
        Entity e = getEntity(eId);
        if (e == null) {
            // Without a matching entity there is nothing to attach the sequence to.
            logger.info("Could not find entity " + epolseq.getEntity_id() + ". Can not match sequence to it.");
            return;
        }

        Chain entityChain = getEntityChain(epolseq.getEntity_id());

        // first we check through the chemcomp provider, if it fails we do some heuristics to guess the type of group
        // TODO some of this code is analogous to getNewGroup() and we should try to unify them - JD 2016-03-08

        Group g = ChemCompGroupFactory.getGroupFromChemCompDictionary(epolseq.getMon_id());
        // int seqId = Integer.parseInt(epolseq.getNum());
        if (g != null && !g.getChemComp().isEmpty()) {
            if (g instanceof AminoAcidImpl) {
                AminoAcidImpl aa = (AminoAcidImpl) g;
                aa.setRecordType(AminoAcid.SEQRESRECORD);
                // aa.setId(seqId);
            }
        } else {
            // Dictionary lookup failed: fall back to heuristics based on the monomer id.
            if (epolseq.getMon_id().length() == 3
                    && StructureTools.get1LetterCodeAmino(epolseq.getMon_id()) != null) {
                // Three-letter code with a known one-letter mapping: treat as amino acid.
                AminoAcidImpl a = new AminoAcidImpl();
                a.setRecordType(AminoAcid.SEQRESRECORD);
                Character code1 = StructureTools.get1LetterCodeAmino(epolseq.getMon_id());
                a.setAminoType(code1);
                g = a;
            } else if (StructureTools.isNucleotide(epolseq.getMon_id())) {
                // the group is actually a nucleotide group...
                NucleotideImpl n = new NucleotideImpl();
                g = n;
            } else {
                logger.debug("Residue {} {} is not a standard aminoacid or nucleotide, will create a het group for it",
                        epolseq.getNum(), epolseq.getMon_id());
                HetatomImpl h = new HetatomImpl();
                g = h;
            }
        }
        // at this stage we don't know about author residue numbers (insertion codes)
        // we abuse now the ResidueNumber field setting the internal residue numbers (label_seq_id,
        // strictly sequential and follow the seqres sequence 1 to n)
        // later the actual ResidueNumbers (author residue numbers) have to be corrected in alignSeqRes()
        g.setResidueNumber(ResidueNumber.fromString(epolseq.getNum()));
        g.setPDBName(epolseq.getMon_id());
        entityChain.addGroup(g);
    }
}
public class AmazonElastiCacheClient {
    /**
     * Modifies the parameters of a cache parameter group to the engine or system default value. You can reset specific
     * parameters by submitting a list of parameter names. To reset the entire cache parameter group, specify the
     * <code>ResetAllParameters</code> and <code>CacheParameterGroupName</code> parameters.
     *
     * @param request
     *        Represents the input of a <code>ResetCacheParameterGroup</code> operation.
     * @return Result of the ResetCacheParameterGroup operation returned by the service.
     * @throws InvalidCacheParameterGroupStateException
     *         The current state of the cache parameter group does not allow the requested operation to occur.
     * @throws CacheParameterGroupNotFoundException
     *         The requested cache parameter group name does not refer to an existing cache parameter group.
     * @throws InvalidParameterValueException
     *         The value for a parameter is invalid.
     * @throws InvalidParameterCombinationException
     *         Two or more incompatible parameters were specified.
     * @sample AmazonElastiCache.ResetCacheParameterGroup
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticache-2015-02-02/ResetCacheParameterGroup"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public ResetCacheParameterGroupResult resetCacheParameterGroup(ResetCacheParameterGroupRequest request) {
        // Generated SDK method: apply client-side request handlers, then dispatch.
        request = beforeClientExecution(request);
        return executeResetCacheParameterGroup(request);
    }
}
public class Searcher { /** * Gets the current numeric refinement for an attribute and an operator . * @ param attribute the attribute to refine on . * @ param operator one of the { @ link NumericRefinement # OPERATOR _ EQ operators } defined in { @ link NumericRefinement } . * @ return a { @ link NumericRefinement } describing the current refinement for these parameters , or { @ code null } if there is none . */ @ SuppressWarnings ( { } }
"WeakerAccess" , "unused" } ) // For library users @ Nullable public NumericRefinement getNumericRefinement ( @ NonNull String attribute , int operator ) { NumericRefinement . checkOperatorIsValid ( operator ) ; final SparseArray < NumericRefinement > attributeRefinements = numericRefinements . get ( attribute ) ; return attributeRefinements == null ? null : attributeRefinements . get ( operator ) ;
public class LoggingConfiguration {
    /**
     * Convert appenders specified by the property <code>log4j.logger.asyncAppender</code> to the
     * blitz4j Asynchronous appenders.
     *
     * @param props
     *            - The properties that need to be passed into the log4j for configuration.
     * @throws ConfigurationException
     * @throws FileNotFoundException
     */
    private void convertConfiguredAppendersToAsync(Properties props)
            throws ConfigurationException, FileNotFoundException {
        for (Map.Entry<String, String> originalAsyncAppenderMapEntry : originalAsyncAppenderNameMap.entrySet()) {
            String asyncAppenderName = originalAsyncAppenderMapEntry.getValue();
            // Declare the async appender under its new name.
            props.setProperty(LOG4J_APPENDER_PREFIX + LOG4J_APPENDER_DELIMITER + asyncAppenderName,
                    AsyncAppender.class.getName());
            // Set the original appender so that it can be fetched later after configuration
            String originalAppenderName = originalAsyncAppenderMapEntry.getKey();
            props.setProperty(LOG4J_APPENDER_PREFIX + LOG4J_APPENDER_DELIMITER + asyncAppenderName
                    + LOG4J_APPENDER_DELIMITER + PROP_LOG4J_ORIGINAL_APPENDER_NAME, originalAppenderName);
            // Set the batcher to reject the collector request instead of it
            // participating in processing
            this.initialProps.setProperty("batcher." + AsyncAppender.class.getName() + "."
                    + originalAppenderName + "." + "rejectWhenFull", "true");
            // Set the default value of the processing max threads to 1, if a
            // value is not specified
            String maxThreads = this.initialProps.getProperty("batcher." + AsyncAppender.class.getName()
                    + "." + originalAppenderName + "." + "maxThreads");
            if (maxThreads == null) {
                this.initialProps.setProperty("batcher." + AsyncAppender.class.getName() + "."
                        + originalAppenderName + "." + "maxThreads", "1");
            }
            // Rewrite every logger/rootLogger property so references to the original appender
            // name now point at the async appender instead.
            // NOTE(review): raw Map.Entry is used here; Map.Entry<Object, Object> would avoid
            // the unchecked access without changing behavior.
            for (Map.Entry mapEntry : props.entrySet()) {
                String key = mapEntry.getKey().toString();
                if ((key.contains(LOG4J_PREFIX) || key.contains(ROOT_CATEGORY) || key.contains(ROOT_LOGGER))
                        && !key.contains(PROP_LOG4J_ASYNC_APPENDERS)
                        && !key.contains(PROP_LOG4J_ORIGINAL_APPENDER_NAME)) {
                    Object value = mapEntry.getValue();
                    if (value != null) {
                        // Values are comma-separated appender lists; swap matching names in place.
                        String[] values = (String.class.cast(value)).split(",");
                        String valueString = "";
                        int ctr = 0;
                        for (String oneValue : values) {
                            if (oneValue == null) {
                                continue;
                            }
                            ++ctr;
                            if (originalAppenderName.equals(oneValue.trim())) {
                                oneValue = asyncAppenderName;
                            }
                            // Re-join with commas, omitting the trailing separator.
                            if (ctr != values.length) {
                                valueString = valueString + oneValue + ",";
                            } else {
                                valueString = valueString + oneValue;
                            }
                        }
                        mapEntry.setValue(valueString);
                    }
                }
            }
        }
    }
}
public class ApiOvhDedicatedCloud {
    /**
     * Enable Zerto replication between your OVH Private Cloud and your onsite infrastructure
     *
     * REST: POST /dedicatedCloud/{serviceName}/datacenter/{datacenterId}/disasterRecovery/zertoSingle/enable
     * @param ovhEndpointIp [required] Your OVH Private Cloud public IP for the secured replication data tunnel endpoint
     * @param remoteVraNetwork [required] Internal zerto subnet of your onsite infrastructure (ip/cidr)
     * @param localVraNetwork [required] Internal zerto subnet for your OVH Private Cloud (ip/cidr)
     * @param serviceName [required] Domain of the service
     * @param datacenterId [required]
     *
     * API beta
     */
    public OvhTask serviceName_datacenter_datacenterId_disasterRecovery_zertoSingle_enable_POST(
            String serviceName, Long datacenterId, String localVraNetwork, String ovhEndpointIp,
            String remoteVraNetwork) throws IOException {
        // Substitute the path parameters into the endpoint template.
        String qPath = "/dedicatedCloud/{serviceName}/datacenter/{datacenterId}/disasterRecovery/zertoSingle/enable";
        StringBuilder sb = path(qPath, serviceName, datacenterId);
        // Body parameters for the POST request.
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "localVraNetwork", localVraNetwork);
        addBody(o, "ovhEndpointIp", ovhEndpointIp);
        addBody(o, "remoteVraNetwork", remoteVraNetwork);
        String resp = exec(qPath, "POST", sb.toString(), o);
        return convertTo(resp, OvhTask.class);
    }
}
public class DSSPParser { /** * Parse a DSSP format String and return the secondary structure * annotation as a List of { @ link SecStrucState } objects . * @ param dsspOut String with the DSSP output to parse * @ param structure Structure object associated to the dssp * @ param assign assigns the SS to the structure if true * @ return a List of SS annotation objects * @ throws StructureException * @ throws IOException */ public static List < SecStrucState > parseString ( String dsspOut , Structure structure , boolean assign ) throws IOException , StructureException { } }
Reader read = new StringReader ( dsspOut ) ; BufferedReader reader = new BufferedReader ( read ) ; return generalParse ( reader , structure , assign ) ;
public class UserStub { /** * Elements to display when outputting in simplified format , e . g . , to DOT file * @ return Iterable */ @ Override public scala . collection . Iterable < String > simplifiedDisplayableElements ( ) { } }
LinkedList < String > elements = new LinkedList < > ( ) ; elements . add ( displayableDataId ( ) ) ; return linkedListToScalaIterable ( elements ) ;
public class XmlHandler {
    /**
     * {@inheritDoc}
     *
     * Re-serializes the XML request in compact form (declaration omitted). On any parse or
     * output error the error is logged and {@code null} is returned.
     */
    @Override
    protected String format(String request) {
        String result = null;
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try {
            // NOTE(review): getBytes()/new String(byte[]) use the platform default charset on both
            // sides; consistent round-trip, but an explicit charset would be safer — confirm inputs
            // are never in a charset the platform default cannot represent.
            ByteArrayInputStream bis = new ByteArrayInputStream(request.getBytes());
            Document doc = new SAXBuilder().build(bis);
            bis.close();
            Format format = Format.getCompactFormat();
            // To not have the ? in the declaration interpreted as regular expressions.
            format.setOmitDeclaration(true);
            XMLOutputter out = new XMLOutputter(format);
            out.output(doc, bos);
            result = new String(bos.toByteArray());
            bos.close();
        } catch (Exception e) {
            // NOTE(review): broad Exception catch swallows all failures and returns null; callers
            // must tolerate a null result.
            log.error(null, e);
        }
        return result;
    }
}
public class SubnetworkClient {
    /**
     * Sets the access control policy on the specified resource. Replaces any existing policy.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (SubnetworkClient subnetworkClient = SubnetworkClient.create()) {
     *   ProjectRegionSubnetworkResourceName resource = ProjectRegionSubnetworkResourceName.of("[PROJECT]", "[REGION]", "[RESOURCE]");
     *   RegionSetPolicyRequest regionSetPolicyRequestResource = RegionSetPolicyRequest.newBuilder().build();
     *   Policy response = subnetworkClient.setIamPolicySubnetwork(resource.toString(), regionSetPolicyRequestResource);
     * }
     * </code></pre>
     *
     * @param resource Name or id of the resource for this request.
     * @param regionSetPolicyRequestResource the policy payload to apply
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Policy setIamPolicySubnetwork(String resource,
            RegionSetPolicyRequest regionSetPolicyRequestResource) {
        // Generated convenience overload: wrap the arguments in the HTTP request object and
        // delegate to the request-based overload.
        SetIamPolicySubnetworkHttpRequest request = SetIamPolicySubnetworkHttpRequest.newBuilder()
                .setResource(resource)
                .setRegionSetPolicyRequestResource(regionSetPolicyRequestResource)
                .build();
        return setIamPolicySubnetwork(request);
    }
}
public class AbstractRpcServer {
    /**
     * Starts this server.
     *
     * <p>This is a synchronous operation.</p>
     *
     * @throws Exception if binding to the configured port fails; the bind failure cause is wrapped
     */
    public void startUp() throws Exception {
        logger.info(String.format("Starting RPC server on port %d", port));
        ServerBootstrap bootstrap = new ServerBootstrap();
        // Bind and block until the bind attempt completes (success or failure).
        ChannelFuture futureChannel = bootstrap.group(parentGroup, childGroup)
                .channel(channelClass)
                .childHandler(channelInitializer())
                .bind(port)
                .awaitUninterruptibly();
        if (futureChannel.isSuccess()) {
            this.channel = futureChannel.channel();
            logger.info("RPC server started successfully.");
        } else {
            // NOTE(review): the failure is logged at info level; a higher severity would be more
            // appropriate — confirm what levels this logger type offers.
            logger.info("Failed to start RPC server.");
            throw new Exception(futureChannel.cause());
        }
    }
}
public class TextGroupEntryPoints {
  /**
   * Runs a {@link TextGroupEntryPoint}. The entry point class will be injected using an
   * {@link Injector} configured by two modules: a
   * {@link com.bbn.bue.common.parameters.ParametersModule} initialized by loading the first
   * (and required-to-be-only) argument in {@code args}, and an instance of
   * {@code configModuleClass} instantiated according to the rules of
   * {@link ModuleUtils#instantiateModule(Class, Parameters)}. This method will then call
   * {@link TextGroupEntryPoint#run()} on the injected entry point object.
   *
   * <p>This method also provides some convenient features for debugging, controlled by parameters:
   * <ul>
   *   <li>{@code com.bbn.bue.common.debug.allowExceptionsToPassUncaught}: (default false) In
   *   production use, we always want to catch exceptions and explicitly exit to ensure a non-zero
   *   exit code (which the JVM does not guarantee). However, sometimes in debugging it is useful
   *   to suppress this behavior, because it blocks IntelliJ's useful "break on uncaught
   *   exceptions only" option. Therefore we allow the user to disable it with this
   *   parameter.</li>
   *   <li>{@code com.bbn.bue.common.debug.graphGuiceDependenciesTo}: (default absent) If
   *   specified, a {@code dot} file for the Guice dependencies will be written to the specified
   *   file. If {@code com.bbn.bue.common.debug.skipExecution} is specified and true, the normal
   *   execution of the program will be skipped.</li>
   * </ul>
   *
   * @param entryPointClass the entry point implementation to inject and run
   * @param configModuleClass the Guice module supplying application bindings
   * @param args command-line arguments; must contain exactly one element, the parameter file path
   * @throws Exception whatever the entry point or injection machinery propagates
   */
  public static void runEntryPoint(Class<? extends TextGroupEntryPoint> entryPointClass,
      Class<? extends Module> configModuleClass, String[] args) throws Exception {
    // Note the internal helper takes (entryPoint, args, module) — a different
    // argument order than this public facade.
    runEntryPointInternal(entryPointClass, args, configModuleClass);
  }
}
public class TransactionRequestProcessor { /** * Handles a rollback request from a client . */ void rollbackTransaction ( HotRodHeader header , Subject subject , XidImpl xid ) { } }
RollbackTransactionOperation operation = new RollbackTransactionOperation ( header , server , subject , xid , this :: writeTransactionResponse ) ; executor . execute ( operation ) ;
public class BaseCrawler { /** * Creates a crawl request for the redirected URL , feeds it to the crawler and calls the * appropriate event callback . * @ param currentCrawlCandidate the current crawl candidate * @ param redirectedUrl the URL of the redirected request */ private void handleRequestRedirect ( final CrawlCandidate currentCrawlCandidate , final String redirectedUrl ) { } }
CrawlRequestBuilder builder = new CrawlRequestBuilder ( redirectedUrl ) . setPriority ( currentCrawlCandidate . getPriority ( ) ) ; currentCrawlCandidate . getMetadata ( ) . ifPresent ( builder :: setMetadata ) ; CrawlRequest redirectedRequest = builder . build ( ) ; crawlFrontier . feedRequest ( redirectedRequest , false ) ; callbackManager . call ( CrawlEvent . REQUEST_REDIRECT , new RequestRedirectEvent ( currentCrawlCandidate , redirectedRequest ) ) ;
public class Reflector { /** * calls a Static Method on the given CLass * @ param clazz Class to call Method on it * @ param methodName Name of the Method to get * @ param args Arguments of the Method to get * @ return return return value of the called Method * @ throws PageException */ public static Object callStaticMethod ( Class clazz , String methodName , Object [ ] args ) throws PageException { } }
try { return getMethodInstance ( null , clazz , methodName , args ) . invoke ( null ) ; } catch ( InvocationTargetException e ) { Throwable target = e . getTargetException ( ) ; if ( target instanceof PageException ) throw ( PageException ) target ; throw Caster . toPageException ( e . getTargetException ( ) ) ; } catch ( Exception e ) { throw Caster . toPageException ( e ) ; }
public class GeneralizedParetoDistribution { /** * Quantile function of GPD distribution * @ param val Value * @ param mu Location parameter mu * @ param sigma Scale parameter sigma * @ param xi Shape parameter xi ( = - kappa ) * @ return Quantile function at position x . */ public static double quantile ( double val , double mu , double sigma , double xi ) { } }
if ( val < 0.0 || val > 1.0 ) { return Double . NaN ; } if ( xi == 0. ) { return mu - sigma * FastMath . log ( 1 - val ) ; } return mu - sigma / xi * ( 1 - FastMath . pow ( 1 - val , - xi ) ) ;
public class Logging {
  /**
   * Gets the logging targets (appender names) for the specified device.
   *
   * @param dev_name the device name
   * @return the names of all appenders attached to the device's logger
   * @throws DevFailed if the device cannot be resolved by name
   */
  public String[] get_logging_target(String dev_name) throws DevFailed {
    // Get device by name.
    DeviceImpl dev = Util.instance().get_device_by_name(dev_name);
    // Collect appender names in a single pass. The previous implementation
    // enumerated the appenders twice (once just to pre-size the array).
    java.util.List<String> targets = new java.util.ArrayList<String>();
    Enumeration all_appenders = dev.get_logger().getAllAppenders();
    while (all_appenders.hasMoreElements()) {
      Appender appender = (Appender) all_appenders.nextElement();
      targets.add(appender.getName());
    }
    return targets.toArray(new String[targets.size()]);
  }
}
public class Replicator { /** * Finds a document in the replicator database . * @ return { @ link ReplicatorDocument } */ public ReplicatorDocument find ( ) { } }
assertNotEmpty ( replicatorDoc . getId ( ) , "Doc id" ) ; final URI uri = new DatabaseURIHelper ( dbURI ) . documentUri ( replicatorDoc . getId ( ) , replicatorDoc . getRevision ( ) ) ; return client . get ( uri , ReplicatorDocument . class ) ;
public class GeneFeatureHelper { /** * Load GFF2 feature file generated from the geneid prediction algorithm and map features onto the chromosome sequences * @ param chromosomeSequenceList * @ param listGenes * @ throws Exception */ static public void addGeneIDGFF2GeneFeatures ( LinkedHashMap < String , ChromosomeSequence > chromosomeSequenceList , FeatureList listGenes ) throws Exception { } }
Collection < String > geneIds = listGenes . attributeValues ( "gene_id" ) ; for ( String geneid : geneIds ) { FeatureList gene = listGenes . selectByAttribute ( "gene_id" , geneid ) ; FeatureI geneFeature = gene . get ( 0 ) ; ChromosomeSequence seq = chromosomeSequenceList . get ( geneFeature . seqname ( ) ) ; geneid = geneid . replaceAll ( "_" , ".G" ) ; AccessionID geneAccessionID = new AccessionID ( geneid ) ; GeneSequence geneSequence = null ; Collection < String > transcriptids = gene . attributeValues ( "gene_id" ) ; for ( String transcriptid : transcriptids ) { // get all the individual features ( exons , CDS regions , etc . ) of this gene FeatureList transcriptFeature = listGenes . selectByAttribute ( "gene_id" , transcriptid ) ; transcriptid = transcriptid . replaceAll ( "_" , ".G" ) ; // String seqName = feature . seqname ( ) ; // FeatureI startCodon = null ; // FeatureI stopCodon = null ; Integer startCodonBegin = null ; Integer stopCodonEnd = null ; // String startCodonName = " " ; // String stopCodonName = " " ; // now select only the coding regions of this gene FeatureList firstFeatures = transcriptFeature . selectByType ( "First" ) ; FeatureList terminalFeatures = transcriptFeature . selectByType ( "Terminal" ) ; FeatureList internalFeatures = transcriptFeature . selectByType ( "Internal" ) ; FeatureList singleFeatures = transcriptFeature . selectByType ( "Single" ) ; FeatureList cdsFeatures = new FeatureList ( ) ; cdsFeatures . add ( firstFeatures ) ; cdsFeatures . add ( terminalFeatures ) ; cdsFeatures . add ( internalFeatures ) ; cdsFeatures . add ( singleFeatures ) ; // sort them cdsFeatures = cdsFeatures . sortByStart ( ) ; Strand strand = Strand . POSITIVE ; FeatureI feature = cdsFeatures . get ( 0 ) ; if ( feature . location ( ) . isNegative ( ) ) { strand = Strand . NEGATIVE ; } if ( startCodonBegin == null ) { FeatureI firstFeature = cdsFeatures . get ( 0 ) ; if ( strand == Strand . NEGATIVE ) { startCodonBegin = firstFeature . 
location ( ) . bioEnd ( ) ; } else { startCodonBegin = firstFeature . location ( ) . bioStart ( ) ; } } if ( stopCodonEnd == null ) { FeatureI lastFeature = cdsFeatures . get ( cdsFeatures . size ( ) - 1 ) ; if ( strand == Strand . NEGATIVE ) { stopCodonEnd = lastFeature . location ( ) . bioStart ( ) ; } else { stopCodonEnd = lastFeature . location ( ) . bioEnd ( ) ; } } // for gtf ordering can be strand based so first is last and last is first if ( startCodonBegin > stopCodonEnd ) { int temp = startCodonBegin ; startCodonBegin = stopCodonEnd ; stopCodonEnd = temp ; } AccessionID transcriptAccessionID = new AccessionID ( transcriptid ) ; if ( geneSequence == null ) { geneSequence = seq . addGene ( geneAccessionID , startCodonBegin , stopCodonEnd , strand ) ; geneSequence . setSource ( ( ( Feature ) feature ) . source ( ) ) ; } else { // if multiple transcripts for one gene make sure the gene is defined as the min and max start / end if ( startCodonBegin < geneSequence . getBioBegin ( ) ) { geneSequence . setBioBegin ( startCodonBegin ) ; } if ( stopCodonEnd > geneSequence . getBioBegin ( ) ) { geneSequence . setBioEnd ( stopCodonEnd ) ; } } TranscriptSequence transcriptSequence = geneSequence . addTranscript ( transcriptAccessionID , startCodonBegin , stopCodonEnd ) ; /* if ( startCodon ! = null ) { if ( startCodonName = = null | | startCodonName . length ( ) = = 0 ) { startCodonName = transcriptid + " - start _ codon - " + startCodon . location ( ) . bioStart ( ) + " - " + startCodon . location ( ) . bioEnd ( ) ; transcriptSequence . addStartCodonSequence ( new AccessionID ( startCodonName ) , startCodon . location ( ) . bioStart ( ) , startCodon . location ( ) . bioEnd ( ) ) ; if ( stopCodon ! = null ) { if ( stopCodonName = = null | | stopCodonName . length ( ) = = 0 ) { stopCodonName = transcriptid + " - stop _ codon - " + stopCodon . location ( ) . bioStart ( ) + " - " + stopCodon . location ( ) . bioEnd ( ) ; transcriptSequence . 
addStopCodonSequence ( new AccessionID ( stopCodonName ) , stopCodon . location ( ) . bioStart ( ) , stopCodon . location ( ) . bioEnd ( ) ) ; */ for ( FeatureI cdsFeature : cdsFeatures ) { Feature cds = ( Feature ) cdsFeature ; String cdsName = cds . getAttribute ( "transcript_name" ) ; if ( cdsName == null || cdsName . length ( ) == 0 ) { cdsName = transcriptid + "-cds-" + cds . location ( ) . bioStart ( ) + "-" + cds . location ( ) . bioEnd ( ) ; } AccessionID cdsAccessionID = new AccessionID ( cdsName ) ; // ExonSequence exonSequence = geneSequence . addExon ( cdsAccessionID , cdsFeature . location ( ) . bioStart ( ) , cdsFeature . location ( ) . bioEnd ( ) ) ; CDSSequence cdsSequence = transcriptSequence . addCDS ( cdsAccessionID , cdsFeature . location ( ) . bioStart ( ) , cdsFeature . location ( ) . bioEnd ( ) , cds . frame ( ) ) ; cdsSequence . setSequenceScore ( cds . score ( ) ) ; } } }
public class RunnersApi { /** * List jobs that are being processed or were processed by specified Runner . * < pre > < code > GitLab Endpoint : GET / runners / : id / jobs < / code > < / pre > * @ param runnerId The ID of a runner * @ param status Status of the job ; one of : running , success , failed , canceled * @ return List jobs that are being processed or were processed by specified Runner * @ throws GitLabApiException if any exception occurs */ public List < Job > getJobs ( Integer runnerId , JobStatus status ) throws GitLabApiException { } }
return ( getJobs ( runnerId , status , getDefaultPerPage ( ) ) . all ( ) ) ;
public class JSONWorldDataHelper {
  /**
   * Builds the basic life world data to be used as observation signals by the listener.
   *
   * @param json a JSON object into which the life stats will be added
   * @param player the player whose vital statistics are sampled
   */
  public static void buildLifeStats(JsonObject json, EntityPlayerMP player) {
    json.addProperty("Life", player.getHealth());
    json.addProperty("Score", player.getScore()); // Might always be the same as XP?
    json.addProperty("Food", player.getFoodStats().getFoodLevel());
    json.addProperty("XP", player.experienceTotal);
    json.addProperty("IsAlive", !player.isDead);
    json.addProperty("Air", player.getAir());
    json.addProperty("Name", player.getName());
  }
}
public class DateTimeUtil { /** * 获取季度的第一天 * @ param date 时间 ( { @ link Date } ) * @ return 时间 ( { @ link java . util . Date } ) , 如果date is null , 将返回null */ public static Date getFirstDateOfSeason ( Date date ) { } }
if ( date == null ) return null ; return getFirstDateOfMonth ( getSeasonDate ( date ) [ 0 ] ) ;
public class EJBMethodInfoImpl { public String getAMCName ( ) { } }
// @ MD16426A J2EEName j2eeName = bmd . getJ2EEName ( ) ; // @ MD16426A String app = j2eeName . getApplication ( ) ; // @ MD16426A String mod = j2eeName . getModule ( ) ; // @ MD16426A String comp = j2eeName . getComponent ( ) ; // @ MD16426A String retval = app + "::" + mod + "::" + comp ; // @ PK07137 return retval ; // @ MD16426A
public class IndustryApi { /** * List solar system cost indices Return cost indices for solar systems - - - * This route is cached for up to 3600 seconds * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ return List & lt ; IndustrySystemsResponse & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public List < IndustrySystemsResponse > getIndustrySystems ( String datasource , String ifNoneMatch ) throws ApiException { } }
ApiResponse < List < IndustrySystemsResponse > > resp = getIndustrySystemsWithHttpInfo ( datasource , ifNoneMatch ) ; return resp . getData ( ) ;
public class RetrievalWorker { /** * Renames the given file , returns the copied file . Does not change * the original passed in file path . */ protected File renameFile ( File localFile ) throws IOException { } }
File origFile = new File ( localFile . getAbsolutePath ( ) ) ; File copiedFile = new File ( localFile . getParent ( ) , localFile . getName ( ) + COPY ) ; for ( int i = 2 ; copiedFile . exists ( ) ; i ++ ) { copiedFile = new File ( localFile . getParent ( ) , localFile . getName ( ) + COPY + "-" + i ) ; } FileUtils . moveFile ( origFile , copiedFile ) ; return copiedFile ;
public class GraphicsUtilities {
  /**
   * Draws an image on top of a component by doing a 3x3 grid ("nine-patch") stretch of the
   * image using the specified insets: the four corners are drawn unscaled, the four edges are
   * stretched along one axis, and the center is stretched in both.
   *
   * @param g the graphics object
   * @param comp the component whose bounds define the destination area
   * @param img the image to stretch
   * @param ins the insets defining the fixed corner/edge sizes
   */
  public static void tileStretchPaint(Graphics g, JComponent comp, BufferedImage img, Insets ins) {
    int left = ins.left;
    int right = ins.right;
    int top = ins.top;
    int bottom = ins.bottom;
    // top row: left corner (unscaled), top edge (stretched horizontally), right corner
    g.drawImage(img, 0, 0, left, top, 0, 0, left, top, null);
    g.drawImage(img, left, 0, comp.getWidth() - right, top, left, 0, img.getWidth() - right, top,
        null);
    g.drawImage(img, comp.getWidth() - right, 0, comp.getWidth(), top, img.getWidth() - right, 0,
        img.getWidth(), top, null);
    // middle row: left edge (stretched vertically), center (stretched both ways), right edge
    g.drawImage(img, 0, top, left, comp.getHeight() - bottom, 0, top, left,
        img.getHeight() - bottom, null);
    g.drawImage(img, left, top, comp.getWidth() - right, comp.getHeight() - bottom, left, top,
        img.getWidth() - right, img.getHeight() - bottom, null);
    g.drawImage(img, comp.getWidth() - right, top, comp.getWidth(), comp.getHeight() - bottom,
        img.getWidth() - right, top, img.getWidth(), img.getHeight() - bottom, null);
    // bottom row: left corner, bottom edge (stretched horizontally), right corner
    g.drawImage(img, 0, comp.getHeight() - bottom, left, comp.getHeight(), 0,
        img.getHeight() - bottom, left, img.getHeight(), null);
    g.drawImage(img, left, comp.getHeight() - bottom, comp.getWidth() - right, comp.getHeight(),
        left, img.getHeight() - bottom, img.getWidth() - right, img.getHeight(), null);
    g.drawImage(img, comp.getWidth() - right, comp.getHeight() - bottom, comp.getWidth(),
        comp.getHeight(), img.getWidth() - right, img.getHeight() - bottom, img.getWidth(),
        img.getHeight(), null);
  }
}
public class FileServersInner {
  /**
   * Creates a file server.
   *
   * @param resourceGroupName Name of the resource group to which the resource belongs.
   * @param fileServerName The name of the file server within the specified resource group. File
   *     server names can only contain a combination of alphanumeric characters along with dash (-)
   *     and underscore (_). The name must be from 1 through 64 characters long.
   * @param parameters The parameters to provide for file server creation.
   * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
   * @throws IllegalArgumentException thrown if parameters fail the validation
   * @return the {@link ServiceFuture} object
   */
  public ServiceFuture<FileServerInner> beginCreateAsync(String resourceGroupName,
      String fileServerName, FileServerCreateParameters parameters,
      final ServiceCallback<FileServerInner> serviceCallback) {
    // Bridge the service-response observable to a ServiceFuture that notifies
    // the supplied callback on completion or failure.
    return ServiceFuture.fromResponse(
        beginCreateWithServiceResponseAsync(resourceGroupName, fileServerName, parameters),
        serviceCallback);
  }
}
public class TypeCheck {
  /**
   * Visits the loop variable of a FOR_OF and FOR_AWAIT_OF and verifies the type being assigned
   * to it.
   *
   * @param t the current traversal
   * @param forOf the FOR_OF / FOR_AWAIT_OF node being checked
   */
  private void checkForOfTypes(NodeTraversal t, Node forOf) {
    Node lhs = forOf.getFirstChild();
    Node iterable = forOf.getSecondChild();
    JSType iterableType = getJSType(iterable);
    // The element type that each iteration actually assigns to the loop variable.
    JSType actualType;
    if (forOf.isForAwaitOf()) {
      Optional<JSType> maybeType = validator.expectAutoboxesToIterableOrAsyncIterable(
          iterable, iterableType,
          "Can only async iterate over a (non-null) Iterable or AsyncIterable type");
      if (!maybeType.isPresent()) {
        // Not iterable or async iterable, error reported by
        // expectAutoboxesToIterableOrAsyncIterable.
        return;
      }
      actualType = maybeType.get();
    } else {
      validator.expectAutoboxesToIterable(
          iterable, iterableType, "Can only iterate over a (non-null) Iterable type");
      actualType =
          // Convert primitives to their wrapper type and remove null/undefined
          // If iterable is a union type, autoboxes each member of the union.
          iterableType.autobox().getTemplateTypeMap()
              .getResolvedTemplateType(typeRegistry.getIterableTemplate());
    }
    if (NodeUtil.isNameDeclaration(lhs)) {
      // e.g. get "x" given the VAR in "for (var x of arr) {"
      lhs = lhs.getFirstChild();
    }
    if (lhs.isDestructuringLhs()) {
      // e.g. get `[x, y]` given the VAR in `for (var [x, y] of arr) {`
      lhs = lhs.getFirstChild();
    }
    // Finally verify the element type is assignable to the (possibly annotated)
    // loop variable within the current scope.
    checkCanAssignToWithScope(t, forOf, lhs, actualType, lhs.getJSDocInfo(),
        "declared type of for-of loop variable does not match inferred type");
  }
}
public class U { /** * Documented , # compose */ @ SuppressWarnings ( "unchecked" ) public static < T > Function < T , T > compose ( final Function < T , T > ... func ) { } }
return new Function < T , T > ( ) { public T apply ( final T arg ) { T result = arg ; for ( int index = func . length - 1 ; index >= 0 ; index -= 1 ) { result = func [ index ] . apply ( result ) ; } return result ; } } ;
public class PostgreSQLCommandExecutorFactory {
  /**
   * Create new instance of command executor.
   *
   * @param commandPacketType command packet type for PostgreSQL
   * @param commandPacket command packet for PostgreSQL
   * @param backendConnection backend connection
   * @return command executor matching the packet type; an "unsupported" executor for anything
   *     not explicitly handled
   */
  public static CommandExecutor newInstance(final PostgreSQLCommandPacketType commandPacketType,
      final PostgreSQLCommandPacket commandPacket, final BackendConnection backendConnection) {
    log.debug("Execute packet type: {}, value: {}", commandPacketType, commandPacket);
    switch (commandPacketType) {
      // Simple query protocol.
      case QUERY:
        return new PostgreSQLComQueryExecutor((PostgreSQLComQueryPacket) commandPacket, backendConnection);
      // Extended query protocol: parse/bind/describe/execute/sync.
      case PARSE:
        return new PostgreSQLComParseExecutor((PostgreSQLComParsePacket) commandPacket, backendConnection);
      case BIND:
        return new PostgreSQLComBindExecutor((PostgreSQLComBindPacket) commandPacket, backendConnection);
      case DESCRIBE:
        return new PostgreSQLComDescribeExecutor();
      case EXECUTE:
        return new PostgreSQLComExecuteExecutor();
      case SYNC:
        return new PostgreSQLComSyncExecutor();
      case TERMINATE:
        return new PostgreSQLComTerminationExecutor();
      default:
        return new PostgreSQLUnsupportedCommandExecutor();
    }
  }
}
public class NvWebSocket { /** * Removes current web socket instance . */ protected void dispose ( ) { } }
final WebSocket currentWebSocket = webSocket ; if ( currentWebSocket != null && currentWebSocket . isOpen ( ) ) { try { currentWebSocket . disconnect ( WebSocketCloseCode . AWAY . getCode ( ) ) ; } catch ( final Exception exception ) { postErrorEvent ( exception ) ; } }
public class RemoteWebElement {
  /**
   * Clicks this element by executing the CLICK automation atom against the element's remote
   * object in the inspector.
   *
   * <p>TODO freynaud: no return here.</p>
   *
   * @throws WebDriverException wrapping any JSON marshalling failure
   */
  private void clickAtom() {
    try {
      // Wrap the CLICK atom in a function taking the element reference.
      String f = "(function(arg) { " + "var text = " + IosAtoms.CLICK + "(arg);"
          + "return text;})";
      JSONArray args = new JSONArray();
      // The single argument is the remote object id of this element.
      args.put(new JSONObject().put("objectId", getRemoteObject().getId()));
      JSONObject response = getInspectorResponse(f, args, true);
      // Propagate any inspector-side error embedded in the response.
      inspector.cast(response);
    } catch (JSONException e) {
      throw new WebDriverException(e);
    }
  }
}
public class Parser { /** * Parse type variables for generics * @ param typeVariable * @ return */ protected TypeParameter parseTypeParameter ( TypeVariable typeVariable ) { } }
TypeParameter typeParameter = objectFactory . createTypeParameter ( ) ; typeParameter . setName ( typeVariable . typeName ( ) ) ; for ( Type bound : typeVariable . bounds ( ) ) { typeParameter . getBound ( ) . add ( bound . qualifiedTypeName ( ) ) ; } return typeParameter ;
public class Misc { /** * Utility to join strings with a separator . Skips null strings and does not append a trailing * separator . * @ param separator the string to use to separate the entries * @ param stringsToJoin the strings to join together * @ return the joined string */ public static String join ( String separator , Collection < String > stringsToJoin ) { } }
StringBuilder builder = new StringBuilder ( ) ; // Check if there is at least 1 element then use do / while to avoid trailing separator int index = stringsToJoin . size ( ) ; for ( String str : stringsToJoin ) { index -- ; if ( str != null ) { builder . append ( str ) ; if ( index > 0 ) { builder . append ( separator ) ; } } } return builder . toString ( ) ;
public class DCModuleParser {
  /**
   * Parse an element tree and return the Dublin Core module found in it.
   *
   * @param dcRoot the root element containing the module elements
   * @param locale for date/time parsing
   * @return the module parsed from the element tree, <i>null</i> if no DC element was present
   */
  @Override
  public Module parse(final Element dcRoot, final Locale locale) {
    boolean foundSomething = false;
    final DCModule dcm = new DCModuleImpl();
    // Each block below follows the same pattern: read the DC-namespace children
    // of one element name and, when any exist, record them on the module.
    final List<Element> titles = dcRoot.getChildren("title", getDCNamespace());
    if (!titles.isEmpty()) {
      foundSomething = true;
      dcm.setTitles(parseElementList(titles));
    }
    final List<Element> creators = dcRoot.getChildren("creator", getDCNamespace());
    if (!creators.isEmpty()) {
      foundSomething = true;
      dcm.setCreators(parseElementList(creators));
    }
    final List<Element> subjects = dcRoot.getChildren("subject", getDCNamespace());
    if (!subjects.isEmpty()) {
      foundSomething = true;
      // subjects have their own structure (taxonomy/value), hence the dedicated parser
      dcm.setSubjects(parseSubjects(subjects));
    }
    final List<Element> descriptions = dcRoot.getChildren("description", getDCNamespace());
    if (!descriptions.isEmpty()) {
      foundSomething = true;
      dcm.setDescriptions(parseElementList(descriptions));
    }
    final List<Element> publishers = dcRoot.getChildren("publisher", getDCNamespace());
    if (!publishers.isEmpty()) {
      foundSomething = true;
      dcm.setPublishers(parseElementList(publishers));
    }
    final List<Element> contributors = dcRoot.getChildren("contributor", getDCNamespace());
    if (!contributors.isEmpty()) {
      foundSomething = true;
      dcm.setContributors(parseElementList(contributors));
    }
    final List<Element> dates = dcRoot.getChildren("date", getDCNamespace());
    if (!dates.isEmpty()) {
      foundSomething = true;
      // dates are parsed with the supplied locale
      dcm.setDates(parseElementListDate(dates, locale));
    }
    final List<Element> types = dcRoot.getChildren("type", getDCNamespace());
    if (!types.isEmpty()) {
      foundSomething = true;
      dcm.setTypes(parseElementList(types));
    }
    final List<Element> formats = dcRoot.getChildren("format", getDCNamespace());
    if (!formats.isEmpty()) {
      foundSomething = true;
      dcm.setFormats(parseElementList(formats));
    }
    final List<Element> identifiers = dcRoot.getChildren("identifier", getDCNamespace());
    if (!identifiers.isEmpty()) {
      foundSomething = true;
      dcm.setIdentifiers(parseElementList(identifiers));
    }
    final List<Element> sources = dcRoot.getChildren("source", getDCNamespace());
    if (!sources.isEmpty()) {
      foundSomething = true;
      dcm.setSources(parseElementList(sources));
    }
    final List<Element> languages = dcRoot.getChildren("language", getDCNamespace());
    if (!languages.isEmpty()) {
      foundSomething = true;
      dcm.setLanguages(parseElementList(languages));
    }
    final List<Element> relations = dcRoot.getChildren("relation", getDCNamespace());
    if (!relations.isEmpty()) {
      foundSomething = true;
      dcm.setRelations(parseElementList(relations));
    }
    final List<Element> coverages = dcRoot.getChildren("coverage", getDCNamespace());
    if (!coverages.isEmpty()) {
      foundSomething = true;
      dcm.setCoverages(parseElementList(coverages));
    }
    final List<Element> rights = dcRoot.getChildren("rights", getDCNamespace());
    if (!rights.isEmpty()) {
      foundSomething = true;
      dcm.setRightsList(parseElementList(rights));
    }
    // Only report a module when at least one DC element was present.
    if (foundSomething) {
      return dcm;
    } else {
      return null;
    }
  }
}
public class GenericsResolutionUtils {
  /**
   * Resolve type generics by declaration (as upper bound). Used for cases when actual generic
   * definition is not available (so actual generics are unknown). In most cases such generics
   * resolve as Object (for example, {@code Some<T>}).
   *
   * <p>IMPORTANT: this method does not count possible outer class generics! Use
   * {@link #resolveRawGeneric(TypeVariable, LinkedHashMap)} as a universal resolution method.
   *
   * @param type class to analyze generics for
   * @return resolved generics or empty map if no generics are used
   * @see #resolveRawGenerics(Class) to include outer type generics
   */
  public static LinkedHashMap<String, Type> resolveDirectRawGenerics(final Class<?> type) {
    final TypeVariable[] declaredGenerics = type.getTypeParameters();
    if (declaredGenerics.length == 0) {
      // shared empty-map singleton for the common no-generics case
      return EmptyGenericsMap.getInstance();
    }
    final LinkedHashMap<String, Type> res = new LinkedHashMap<String, Type>();
    final List<TypeVariable> failed = new ArrayList<TypeVariable>();
    // Variables in a declaration could be dependent in either direction
    // (e.g. <A extends List<B>, B>), so assume correct order at first; if any
    // resolution fails, order the failed variables and resolve them afterwards
    // (ordering is avoided by default because it is rarely required).
    for (TypeVariable variable : declaredGenerics) {
      try {
        res.put(variable.getName(), resolveRawGeneric(variable, res));
      } catch (UnknownGenericException ex) {
        // preserve declaration order with a null placeholder; resolved below
        res.put(variable.getName(), null);
        failed.add(variable);
      }
    }
    if (!failed.isEmpty()) {
      // second pass in dependency order, replacing the null placeholders
      for (TypeVariable variable : GenericsUtils.orderVariablesForResolution(failed)) {
        res.put(variable.getName(), resolveRawGeneric(variable, res));
      }
    }
    return res;
  }
}
public class DenseOpticalFlowBlockPyramid {
  /**
   * Examines every pixel inside the region centered at (cx, cy) to see if their optical flow
   * has a worse score than the one specified in 'flow'; any such pixel is overwritten with
   * 'flow' and the new (better) score.
   *
   * @param cx region center x
   * @param cy region center y
   * @param flow candidate flow vector to propagate
   * @param image flow image being updated in place
   * @param score score of the candidate flow (lower is better)
   */
  protected void checkNeighbors(int cx, int cy, ImageFlow.D flow, ImageFlow image, float score) {
    for (int i = -regionRadius; i <= regionRadius; i++) {
      // index of the leftmost pixel of this row inside the region
      int index = image.width * (cy + i) + (cx - regionRadius);
      for (int j = -regionRadius; j <= regionRadius; j++, index++) {
        float s = scores[index];
        ImageFlow.D f = image.data[index];
        if (s > score) {
          // candidate is strictly better: replace flow and score
          f.set(flow);
          scores[index] = score;
        } else if (s == score) {
          // Pick solution with the least motion when ambiguous
          float m0 = f.x * f.x + f.y * f.y;
          float m1 = flow.x * flow.x + flow.y * flow.y;
          if (m1 < m0) {
            f.set(flow);
            scores[index] = score;
          }
        }
      }
    }
  }
}
public class SdpComparator { /** * Negotiates the video formats to be used in the call . * @ param sdp The session description * @ param formats The available formats * @ return The supported formats . If no formats are supported the returned list will be empty . */ public RTPFormats negotiateVideo ( SessionDescription sdp , RTPFormats formats ) { } }
this . video . clean ( ) ; MediaDescriptorField descriptor = sdp . getVideoDescriptor ( ) ; descriptor . getFormats ( ) . intersection ( formats , this . video ) ; return this . video ;
public class AbstractDeclarativeValidator { /** * Handles exceptions occuring during execution of validation code . * By default this method will swallow { @ link NullPointerException NullPointerExceptions } and { @ link GuardException } s . * Clients may override this method to propagate { @ link NullPointerException NullPointerExceptions } or more smarter * handling . * @ since 2.17 */ protected void handleExceptionDuringValidation ( Throwable targetException ) throws RuntimeException { } }
// ignore NullPointerException , as not having to check for NPEs all the time is a convenience feature // ignore GuardException , check is just not evaluated if guard is false if ( ! ( targetException instanceof GuardException ) && ! ( targetException instanceof NullPointerException ) ) { Exceptions . throwUncheckedException ( targetException ) ; }
public class GatewayServer { private synchronized void internalActivateTopology ( Topology topology , File topoDir ) throws IOException , ZipException , ParserConfigurationException , TransformerException , SAXException { } }
log . activatingTopology ( topology . getName ( ) ) ; File [ ] files = topoDir . listFiles ( new RegexDirFilter ( "%.*" ) ) ; if ( files != null ) { for ( File file : files ) { internalActivateArchive ( topology , file ) ; } }
public class ImmutableGrid { /** * Obtains an immutable grid by copying a set of cells , deriving the row and column count . * The row and column counts are calculated as the maximum row and column specified . * @ param < R > the type of the value * @ param cells the cells to copy , not null * @ return the immutable grid , not null * @ throws IndexOutOfBoundsException if either index is less than zero */ public static < R > ImmutableGrid < R > copyOfDeriveCounts ( Iterable < ? extends Cell < R > > cells ) { } }
if ( cells == null ) { throw new IllegalArgumentException ( "Cells must not be null" ) ; } if ( ! cells . iterator ( ) . hasNext ( ) ) { return new EmptyGrid < R > ( ) ; } int rowCount = 0 ; int columnCount = 0 ; for ( Cell < R > cell : cells ) { rowCount = Math . max ( rowCount , cell . getRow ( ) ) ; columnCount = Math . max ( columnCount , cell . getColumn ( ) ) ; } return new SparseImmutableGrid < R > ( rowCount + 1 , columnCount + 1 , cells ) ;
public class BytecodeUtils { /** * Checks if { @ code left } is assignable from { @ code right } , however if we don ' t have information * about one of the types then this returns { @ code failOpen } . */ private static boolean doIsAssignableFrom ( Type left , Type right , boolean failOpen ) { } }
if ( left . equals ( right ) ) { return true ; } if ( left . getSort ( ) != right . getSort ( ) ) { return false ; } if ( left . getSort ( ) != Type . OBJECT ) { return false ; // all other sorts require exact equality ( even arrays ) } // for object types we really need to know type hierarchy information to test for whether // right is assignable to left . Optional < Class < ? > > leftClass = objectTypeToClassCache . getUnchecked ( left ) ; Optional < Class < ? > > rightClass = objectTypeToClassCache . getUnchecked ( right ) ; if ( ! leftClass . isPresent ( ) || ! rightClass . isPresent ( ) ) { // This means one of the types being compared is a generated object . So we can ' t easily check // it . Just delegate responsibility to the verifier . return failOpen ; } return leftClass . get ( ) . isAssignableFrom ( rightClass . get ( ) ) ;
public class DataSet { /** * Clears the outcome matrix setting a new number of labels * @ param labels the number of labels / columns in the outcome matrix * Note that this clears the labels for each example */ @ Override public void setNewNumberOfLabels ( int labels ) { } }
int examples = numExamples ( ) ; INDArray newOutcomes = Nd4j . create ( examples , labels ) ; setLabels ( newOutcomes ) ;
public class StreamableEnumSet { /** * Creates an empty set of the specified type . */ public static < E extends Enum < E > > StreamableEnumSet < E > noneOf ( Class < E > elementType ) { } }
return new StreamableEnumSet < E > ( elementType ) ;
public class DockerFunctions { /** * Only this plugin specific launchers . */ public static List < Descriptor < ComputerLauncher > > getDockerComputerLauncherDescriptors ( ) { } }
List < Descriptor < ComputerLauncher > > launchers = new ArrayList < > ( ) ; launchers . add ( getInstance ( ) . getDescriptor ( DockerComputerSSHLauncher . class ) ) ; launchers . add ( getInstance ( ) . getDescriptor ( DockerComputerJNLPLauncher . class ) ) ; launchers . add ( getInstance ( ) . getDescriptor ( DockerComputerIOLauncher . class ) ) ; return launchers ;
public class MetadataHandler { /** * Enhance from metadata . * @ param batch * the Event Batch * @ return the Event Batch */ public EventBatch enhanceFromMetadata ( final EventBatch < AuditEvent > batch ) { } }
for ( AuditEvent event : batch ) { enhanceFromMetadata ( ( AuditEvent ) event ) ; } return batch ;
public class ApiOvhDedicatedceph { /** * Create one or more new IP ACLs * REST : POST / dedicated / ceph / { serviceName } / acl * @ param serviceName [ required ] ID of cluster * @ param aclList [ required ] List of new ACLs * API beta */ public String serviceName_acl_POST ( String serviceName , String [ ] aclList ) throws IOException { } }
String qPath = "/dedicated/ceph/{serviceName}/acl" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "aclList" , aclList ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , String . class ) ;
public class ConfigHelper { /** * list of image names which should be used */ private static List < ImageConfiguration > filterImages ( String nameFilter , List < ImageConfiguration > imagesToFilter ) { } }
List < ImageConfiguration > ret = new ArrayList < > ( ) ; for ( ImageConfiguration imageConfig : imagesToFilter ) { if ( matchesConfiguredImages ( nameFilter , imageConfig ) ) { ret . add ( imageConfig ) ; } } return ret ;
public class RGraph { /** * Projects a RGraph bitset on the source graph G1. * @ param set RGraph BitSet to project * @ return The associate BitSet in G1 */ public BitSet projectG1 ( BitSet set ) { } }
BitSet projection = new BitSet ( firstGraphSize ) ; RNode xNode = null ; for ( int x = set . nextSetBit ( 0 ) ; x >= 0 ; x = set . nextSetBit ( x + 1 ) ) { xNode = ( RNode ) graph . get ( x ) ; projection . set ( xNode . rMap . id1 ) ; } return projection ;
public class XDSRegistryAuditor {
    /**
     * Audits an ITI-16 Registry SQL Query event for XDS.a Document Registry actors.
     * @param eventOutcome The event outcome indicator
     * @param consumerUserId The Active Participant UserID for the consumer (if using WS-Addressing)
     * @param consumerUserName The Active Participant UserName for the consumer (if using WS-Security / XUA)
     * @param consumerIpAddress The IP Address of the consumer that initiated the transaction
     * @param registryEndpointUri The URI of this registry's endpoint that received the transaction
     * @param adhocQueryRequestPayload The payload of the adhoc query request element
     * @param patientId The patient ID queried (if the query pertained to a patient id)
     */
    public void auditRegistryQueryEvent(RFC3881EventOutcomeCodes eventOutcome, String consumerUserId, String consumerUserName,
            String consumerIpAddress, String registryEndpointUri, String adhocQueryRequestPayload, String patientId) {
        // No-op when auditing is disabled for this auditor.
        if (!isAuditorEnabled()) {
            return;
        }
        // Delegate to the generic query auditor with the RegistrySQLQuery transaction code.
        // NOTE(review): consumerUserName is passed for three consecutive parameters and ""
        // for two others; presumably these fill alt-id/requestor slots of auditQueryEvent --
        // confirm against auditQueryEvent's parameter list.
        auditQueryEvent(false, new IHETransactionEventTypeCodes.RegistrySQLQuery(), eventOutcome,
                getAuditSourceId(), getAuditEnterpriseSiteId(), consumerUserId, null,
                consumerUserName, consumerIpAddress, consumerUserName, consumerUserName, true,
                registryEndpointUri, getSystemAltUserId(), "", adhocQueryRequestPayload, "",
                patientId, null, null);
    }
}
public class RelationalOperationsMatrix { /** * Compares the DE - 9I matrix against the scl string . */ private static boolean relationCompare_ ( int [ ] matrix , String scl ) { } }
for ( int i = 0 ; i < 9 ; i ++ ) { switch ( scl . charAt ( i ) ) { case 'T' : assert ( matrix [ i ] != - 2 ) ; if ( matrix [ i ] == - 1 ) return false ; break ; case 'F' : assert ( matrix [ i ] != - 2 ) ; if ( matrix [ i ] != - 1 ) return false ; break ; case '0' : assert ( matrix [ i ] != - 2 ) ; if ( matrix [ i ] != 0 ) return false ; break ; case '1' : assert ( matrix [ i ] != - 2 ) ; if ( matrix [ i ] != 1 ) return false ; break ; case '2' : assert ( matrix [ i ] != - 2 ) ; if ( matrix [ i ] != 2 ) return false ; break ; default : break ; } } return true ;
public class ChannelFrameworkImpl { /** * Create a new ChainData object . * @ param name * @ param type * @ param channels * @ return ChainData * @ throws IncoherentChainException */ protected ChainData createChainData ( String name , FlowType type , ChannelData [ ] channels , Map < Object , Object > properties ) throws IncoherentChainException { } }
return new ChainDataImpl ( name , type , channels , properties ) ;
public class GetDevicePoolCompatibilityResult { /** * Information about incompatible devices . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setIncompatibleDevices ( java . util . Collection ) } or { @ link # withIncompatibleDevices ( java . util . Collection ) } * if you want to override the existing values . * @ param incompatibleDevices * Information about incompatible devices . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetDevicePoolCompatibilityResult withIncompatibleDevices ( DevicePoolCompatibilityResult ... incompatibleDevices ) { } }
if ( this . incompatibleDevices == null ) { setIncompatibleDevices ( new java . util . ArrayList < DevicePoolCompatibilityResult > ( incompatibleDevices . length ) ) ; } for ( DevicePoolCompatibilityResult ele : incompatibleDevices ) { this . incompatibleDevices . add ( ele ) ; } return this ;
public class CryptoServiceSingleton { /** * Converts an array of characters representing hexadecimal values into an array of bytes of those same values . The * returned array will be half the length of the passed array , as it takes two characters to represent any given * byte . An exception is thrown if the passed char array has an odd number of elements . * @ param value An array of characters containing hexadecimal digits * @ return A byte array containing binary data decoded from the supplied char array . * @ throws java . lang . IllegalArgumentException Thrown if an odd number or illegal of characters is supplied */ @ Override public byte [ ] decodeHex ( String value ) { } }
try { return Hex . decodeHex ( value . toCharArray ( ) ) ; } catch ( DecoderException e ) { throw new IllegalArgumentException ( e ) ; }
public class JbcSrcJavaValue { /** * Constructs a JbcSrcJavaValue based on the Expression . The method is used to display helpful * error messages to the user if necessary . It is not invoked . */ static JbcSrcJavaValue of ( Expression expr , Method method , JbcSrcValueErrorReporter reporter ) { } }
checkNotNull ( method ) ; if ( expr instanceof SoyExpression ) { return new JbcSrcJavaValue ( expr , method , /* allowedType = */ ( ( SoyExpression ) expr ) . soyType ( ) , /* constantNull = */ false , /* error = */ false , reporter ) ; } return new JbcSrcJavaValue ( expr , method , /* allowedType = */ null , /* constantNull = */ false , /* error = */ false , reporter ) ;
public class Path3d { /** * Adds a curved segment , defined by two new points , to the path by * drawing a Quadratic curve that intersects both the current * coordinates and the specified coordinates { @ code ( x2 , y2 , z2 ) } , * using the specified point { @ code ( x1 , y1 , z1 ) } as a quadratic * parametric control point . * All coordinates are specified in double precision . * @ param x1 the X coordinate of the quadratic control point * @ param y1 the Y coordinate of the quadratic control point * @ param z1 the Z coordinate of the quadratic control point * @ param x2 the X coordinate of the final end point * @ param y2 the Y coordinate of the final end point * @ param z2 the Z coordinate of the final end point */ public void quadTo ( double x1 , double y1 , double z1 , double x2 , double y2 , double z2 ) { } }
ensureSlots ( true , 6 ) ; this . types [ this . numTypesProperty . get ( ) ] = PathElementType . QUAD_TO ; this . numTypesProperty . set ( this . numTypesProperty . get ( ) + 1 ) ; this . coordsProperty [ this . numCoordsProperty . get ( ) ] . set ( x1 ) ; this . numCoordsProperty . set ( this . numCoordsProperty . get ( ) + 1 ) ; this . coordsProperty [ this . numCoordsProperty . get ( ) ] . set ( y1 ) ; this . numCoordsProperty . set ( this . numCoordsProperty . get ( ) + 1 ) ; this . coordsProperty [ this . numCoordsProperty . get ( ) ] . set ( z1 ) ; this . numCoordsProperty . set ( this . numCoordsProperty . get ( ) + 1 ) ; this . coordsProperty [ this . numCoordsProperty . get ( ) ] . set ( x2 ) ; this . numCoordsProperty . set ( this . numCoordsProperty . get ( ) + 1 ) ; this . coordsProperty [ this . numCoordsProperty . get ( ) ] . set ( y2 ) ; this . numCoordsProperty . set ( this . numCoordsProperty . get ( ) + 1 ) ; this . coordsProperty [ this . numCoordsProperty . get ( ) ] . set ( z2 ) ; this . numCoordsProperty . set ( this . numCoordsProperty . get ( ) + 1 ) ; this . isEmptyProperty = null ; this . isPolylineProperty . set ( false ) ; this . graphicalBounds = null ; this . logicalBounds = null ;
public class WorldMapProcessor { /** * Counts a single pair of coordinates in all datasets . * @ param xCoord * @ param yCoord * @ param itemDocument */ private void countCoordinates ( int xCoord , int yCoord , ItemDocument itemDocument ) { } }
for ( String siteKey : itemDocument . getSiteLinks ( ) . keySet ( ) ) { Integer count = this . siteCounts . get ( siteKey ) ; if ( count == null ) { this . siteCounts . put ( siteKey , 1 ) ; } else { this . siteCounts . put ( siteKey , count + 1 ) ; } } for ( ValueMap vm : this . valueMaps ) { vm . countCoordinates ( xCoord , yCoord , itemDocument ) ; }
public class Optional { /** * Invokes mapping function on inner value if present . * @ param mapper mapping function * @ return an { @ code OptionalDouble } with transformed value if present , * otherwise an empty { @ code OptionalDouble } * @ throws NullPointerException if value is present and * { @ code mapper } is { @ code null } * @ since 1.1.4 */ @ NotNull public OptionalDouble mapToDouble ( @ NotNull ToDoubleFunction < ? super T > mapper ) { } }
if ( ! isPresent ( ) ) return OptionalDouble . empty ( ) ; return OptionalDouble . of ( mapper . applyAsDouble ( value ) ) ;
public class Clause {
    /**
     * Setter for the subclauses feature: stores an array of sub-elements
     * (WordForms or Clauses) into this annotation's CAS feature structure.
     * @generated
     * @param v value to set into the feature
     */
    public void setSubclauses(FSArray v) {
        // Guard emitted by the JCas generator: fail fast if the type system loaded
        // into this CAS does not define the "subclauses" feature.
        if (Clause_Type.featOkTst && ((Clause_Type) jcasType).casFeat_subclauses == null)
            jcasType.jcas.throwFeatMissing("subclauses", "com.digitalpebble.rasp.Clause");
        // Store the low-level FS reference of the array into the feature slot.
        jcasType.ll_cas.ll_setRefValue(addr, ((Clause_Type) jcasType).casFeatCode_subclauses, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class JMThread { /** * Suspend when null r . * @ param < R > the type parameter * @ param intervalAsMillis the interval as millis * @ param objectSupplier the object supplier * @ return the r */ public static < R > R suspendWhenNull ( long intervalAsMillis , Supplier < R > objectSupplier ) { } }
R object = objectSupplier . get ( ) ; if ( object == null ) { log . warn ( "Start Suspending !!!" ) ; long startTimeMillis = System . currentTimeMillis ( ) ; while ( ( object = objectSupplier . get ( ) ) == null ) sleep ( intervalAsMillis ) ; log . warn ( "Stop Suspending Over {} ms" , System . currentTimeMillis ( ) - startTimeMillis ) ; } return object ;
public class JSONConverter {
    /**
     * Encodes a NotificationRegistration instance as JSON:
     *   "objectName": ObjectName,
     *   "filters": [NotificationFilter*]
     * @param out The stream to write JSON to
     * @param value The NotificationRegistration instance to encode. Can't be null.
     *        See writeNotificationFilters() for requirements on the filters.
     * @throws IOException If an I/O error occurs
     * @see #readNotificationRegistration(InputStream)
     * @see #writeNotificationFilters(OutputStream, NotificationFilter[])
     */
    public void writeNotificationRegistration(OutputStream out, NotificationRegistration value) throws IOException {
        writeStartObject(out);                                          // '{'
        writeObjectNameField(out, OM_OBJECTNAME, value.objectName);     // "objectName": ...
        writeNotificationFiltersField(out, OM_FILTERS, value.filters);  // "filters": [...]
        writeEndObject(out);                                            // '}'
    }
}
public class CommerceDiscountUtil { /** * Returns the last commerce discount in the ordered set where expirationDate & lt ; & # 63 ; and status = & # 63 ; . * @ param expirationDate the expiration date * @ param status the status * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce discount , or < code > null < / code > if a matching commerce discount could not be found */ public static CommerceDiscount fetchByLtE_S_Last ( Date expirationDate , int status , OrderByComparator < CommerceDiscount > orderByComparator ) { } }
return getPersistence ( ) . fetchByLtE_S_Last ( expirationDate , status , orderByComparator ) ;
public class RsaCertificateManager { /** * This method signs a certificate signing request ( CSR ) using the specified certificate * authority ( CA ) . This is a convenience method that really should be part of the * < code > CertificateManagement < / code > interface except that it depends on a Bouncy Castle * class in the signature . The java security framework does not have a similar class so it * has been left out of the interface . * @ param caPrivateKey The private key for the certificate authority . * @ param caCertificate The certificate containing the public key for the certificate authority . * @ param request The certificate signing request ( CSR ) to be signed . * @ param serialNumber The serial number for the new certificate . * @ param lifetime How long the certificate should be valid . * @ return The newly signed certificate . */ public X509Certificate signCertificateRequest ( PrivateKey caPrivateKey , X509Certificate caCertificate , PKCS10CertificationRequest request , BigInteger serialNumber , long lifetime ) { } }
try { logger . entry ( ) ; logger . debug ( "Extract public key and subject from the CSR..." ) ; PublicKey publicKey = new JcaPEMKeyConverter ( ) . getPublicKey ( request . getSubjectPublicKeyInfo ( ) ) ; String subject = request . getSubject ( ) . toString ( ) ; logger . debug ( "Generate and sign the certificate..." ) ; X509Certificate result = createCertificate ( caPrivateKey , caCertificate , publicKey , subject , serialNumber , lifetime ) ; logger . exit ( ) ; return result ; } catch ( PEMException e ) { RuntimeException exception = new RuntimeException ( "An unexpected exception occurred while attempting to sign a certificate." , e ) ; logger . error ( exception . toString ( ) ) ; throw exception ; }
public class AbstractSequentialList { /** * Returns the element at the specified position in this list . * < p > This implementation first gets a list iterator pointing to the * indexed element ( with < tt > listIterator ( index ) < / tt > ) . Then , it gets * the element using < tt > ListIterator . next < / tt > and returns it . * @ throws IndexOutOfBoundsException { @ inheritDoc } */ public E get ( int index ) { } }
try { return listIterator ( index ) . next ( ) ; } catch ( NoSuchElementException exc ) { throw new IndexOutOfBoundsException ( "Index: " + index ) ; }
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the EEnum for IfcProtectiveDeviceTrippingUnitTypeEnum
     * from the registered Ifc4 package (generated classifier index 1042).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getIfcProtectiveDeviceTrippingUnitTypeEnum() {
        if (ifcProtectiveDeviceTrippingUnitTypeEnumEEnum == null) {
            // Look the classifier up by its fixed, generator-assigned position.
            ifcProtectiveDeviceTrippingUnitTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1042);
        }
        return ifcProtectiveDeviceTrippingUnitTypeEnumEEnum;
    }
}
public class AbstractDecoratedMap { /** * Removes the given entry from the Map . * @ param pEntry the entry to be removed * @ return the removed entry , or { @ code null } if nothing was removed . */ protected Entry < K , V > removeEntry ( Entry < K , V > pEntry ) { } }
if ( pEntry == null ) { return null ; } // Find candidate entry for this key Entry < K , V > candidate = getEntry ( pEntry . getKey ( ) ) ; if ( candidate == pEntry || ( candidate != null && candidate . equals ( pEntry ) ) ) { // Remove remove ( pEntry . getKey ( ) ) ; return pEntry ; } return null ;
public class CodecCipher {
    /**
     * Initializes {@link cipher} by calling one of the {@code Cipher.init} overloads
     * using {@code mode} and whichever of {@link #key}, {@link #params}, {@link #spec},
     * {@link #certificate} and {@link #random} are configured.
     * Precedence: a configured key wins over the certificate; with a key,
     * AlgorithmParameters ({@code params}) win over an AlgorithmParameterSpec
     * ({@code spec}); a SecureRandom is passed along whenever configured.
     */
    private void initCipher(int mode) throws InvalidKeyException, InvalidAlgorithmParameterException {
        if (key != null) {
            // Key-based initialization.
            if (params != null) {
                // AlgorithmParameters variant (takes precedence over spec).
                if (random != null) {
                    cipher.init(mode, key, params, random);
                } else {
                    cipher.init(mode, key, params);
                }
            } else if (spec != null) {
                // AlgorithmParameterSpec variant.
                if (random != null) {
                    cipher.init(mode, key, spec, random);
                } else {
                    cipher.init(mode, key, spec);
                }
            } else {
                // Key only.
                if (random != null) {
                    cipher.init(mode, key, random);
                } else {
                    cipher.init(mode, key);
                }
            }
        } else {
            // No key configured: fall back to certificate-based initialization.
            if (random != null) {
                cipher.init(mode, certificate, random);
            } else {
                cipher.init(mode, certificate);
            }
        }
    }
}
public class WlpInformation { /** * { @ inheritDoc } */ @ Override public Collection < String > attributesThatCauseBreakingChanges ( ) { } }
// INSTALL was only introduced at 8.5.5.4 so if it ' s set to that won ' t work on earlier clients so force it into a second object if ( Visibility . INSTALL . equals ( this . visibility ) ) { return Collections . singleton ( "visibility" ) ; } return Collections . emptySet ( ) ;
public class ReturnUrl { /** * Get Resource Url for ResendReturnEmail * @ return String Resource Url */ public static MozuUrl resendReturnEmailUrl ( ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/returns/email/resend" ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class Layer { /** * Returns the y - component of the layer ' s origin . */ public float originY ( ) { } }
if ( isSet ( Flag . ODIRTY ) ) { float height = height ( ) ; if ( height > 0 ) { this . originX = origin . ox ( width ( ) ) ; this . originY = origin . oy ( height ) ; setFlag ( Flag . ODIRTY , false ) ; } } return originY ;
public class IdToObjectMap {
    /**
     * Removes an object from the map.
     * @param key the integer key whose mapping should be removed
     * @return the object previously stored under the key
     * @throws SIErrorException if no element was stored under the key
     */
    public Object remove(int key) throws SIErrorException // D214655
    {
        if (tc.isEntryEnabled())
            SibTr.entry(tc, "remove", "" + key);
        // Reuse the captive comparator key to avoid allocating a key object per call.
        captiveComparitorKey.setValue(key); // Start D214655
        Object retObject = map.remove(captiveComparitorKey);
        if (retObject == null) {
            // If no object existed this is always an error too
            throw new SIErrorException(nls.getFormattedMessage("NO_SUCH_KEY_SICO2059", new Object[] { "" + key }, null) // D256974
            );
        }
        if (tc.isEntryEnabled())
            SibTr.exit(tc, "remove");
        return retObject; // End D214655
    }
}
public class CLI { /** * Main entry point for evaluation . * @ throws IOException * the io exception thrown if errors with paths are present */ public final void eval ( ) throws IOException { } }
final String component = this . parsedArguments . getString ( "component" ) ; final String testFile = this . parsedArguments . getString ( "testSet" ) ; final String model = this . parsedArguments . getString ( "model" ) ; Evaluate evaluator = null ; if ( component . equalsIgnoreCase ( "pos" ) ) { evaluator = new POSEvaluate ( testFile , model ) ; } else { evaluator = new LemmaEvaluate ( testFile , model ) ; } if ( this . parsedArguments . getString ( "evalReport" ) != null ) { if ( this . parsedArguments . getString ( "evalReport" ) . equalsIgnoreCase ( "detailed" ) ) { evaluator . detailEvaluate ( ) ; } else if ( this . parsedArguments . getString ( "evalReport" ) . equalsIgnoreCase ( "error" ) ) { evaluator . evalError ( ) ; } else if ( this . parsedArguments . getString ( "evalReport" ) . equalsIgnoreCase ( "brief" ) ) { evaluator . evaluate ( ) ; } } else { evaluator . evaluate ( ) ; }
public class DatabaseManagerSwing { /** * / * Simple tree node factory method - set ' s parent and user object . */ private DefaultMutableTreeNode makeNode ( Object userObject , MutableTreeNode parent ) { } }
DefaultMutableTreeNode node = new DefaultMutableTreeNode ( userObject ) ; if ( parent != null ) { treeModel . insertNodeInto ( node , parent , parent . getChildCount ( ) ) ; } return node ;
public class HttpUtils { /** * Checks whether the given request should be closed or not once completed . * @ param request the request * @ return { @ code true } if the connection is marked as { @ literal keep - alive } , and so must not be closed . { @ code * false } otherwise . Notice that if not set in the request , the default value depends on the HTTP version . */ public static boolean isKeepAlive ( HttpServerRequest request ) { } }
String connection = request . headers ( ) . get ( HeaderNames . CONNECTION ) ; if ( connection != null && connection . equalsIgnoreCase ( CLOSE ) ) { return false ; } if ( request . version ( ) == HttpVersion . HTTP_1_1 ) { return ! CLOSE . equalsIgnoreCase ( connection ) ; } else { return KEEP_ALIVE . equalsIgnoreCase ( connection ) ; }
public class JCRSQLQueryBuilder {
    /**
     * Translates a pattern using the escape character <code>from</code> into a
     * pattern using the escape character <code>to</code>.
     * @param pattern the pattern to translate
     * @param from the currently used escape character
     * @param to the new escape character to use
     * @return the new pattern using the escape character <code>to</code>
     */
    private static String translateEscaping(String pattern, char from, char to) {
        // Nothing to do if the escape chars are identical, or neither occurs.
        if (from == to || (pattern.indexOf(from) < 0 && pattern.indexOf(to) < 0)) {
            return pattern;
        }
        final StringBuilder result = new StringBuilder(pattern.length());
        boolean pendingEscape = false;
        for (int i = 0; i < pattern.length(); i++) {
            final char c = pattern.charAt(i);
            if (c == from) {
                if (pendingEscape) {
                    // An escaped old escape char is a literal: emit it as-is.
                    result.append(from);
                    pendingEscape = false;
                } else {
                    pendingEscape = true;
                }
            } else if (c == to) {
                // A literal occurrence of the new escape char must itself be
                // escaped, whether or not it was escaped with the old char.
                result.append(to).append(to);
                pendingEscape = false;
            } else {
                if (pendingEscape) {
                    // The old escape char now becomes the new one.
                    result.append(to);
                    pendingEscape = false;
                }
                result.append(c);
            }
        }
        return result.toString();
    }
}
public class ErrorToken { /** * - - - - - protected methods - - - - - */ protected void addIfNonNull ( final JsonObject obj , final String key , final JsonElement value ) { } }
if ( value != null && ! JsonNull . INSTANCE . equals ( value ) ) { obj . add ( key , value ) ; }
public class IoSessionEventQueue { /** * Send any session event which were queued while waiting for handshaking to complete . * Please note this is an internal method . DO NOT USE it in your code . */ public void flushPendingSessionEvents ( ) throws Exception { } }
synchronized ( sessionEventsQueue ) { IoSessionEvent evt ; while ( ( evt = sessionEventsQueue . poll ( ) ) != null ) { logger . debug ( " Flushing buffered event: {}" , evt ) ; evt . deliverEvent ( ) ; } }
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the EClass for IfcBoundaryCondition from the
     * registered Ifc4 package (generated classifier index 47).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcBoundaryCondition() {
        if (ifcBoundaryConditionEClass == null) {
            // Look the classifier up by its fixed, generator-assigned position.
            ifcBoundaryConditionEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(47);
        }
        return ifcBoundaryConditionEClass;
    }
}
public class ALPNOfferedClientHelloExplorer {
    /**
     * Walks the ClientHello extension list and returns the collected cipher list
     * when the ALPN extension (extension type 16) is present; returns null when
     * the hello offered no ALPN extension.
     *
     * struct {
     *     ExtensionType extension_type;
     *     opaque extension_data&lt;0..2^16-1&gt;;
     * } Extension;
     * enum {
     *     server_name(0), max_fragment_length(1),
     *     client_certificate_url(2), trusted_ca_keys(3),
     *     truncated_hmac(4), status_request(5), (65535)
     * } ExtensionType;
     */
    private static List<Integer> exploreExtensions(ByteBuffer input, List<Integer> ciphers) throws SSLException {
        int length = getInt16(input); // total length of the extensions block
        while (length > 0) {
            int extType = getInt16(input); // extension type
            int extLen = getInt16(input); // length of extension data
            if (extType == 16) {
                // 16 = application_layer_protocol_negotiation (ALPN, RFC 7301):
                // the hello offered ALPN, so report the ciphers collected so far.
                return ciphers;
            } else {
                // Skip over the data of any other extension.
                processByteVector(input, extLen);
            }
            length -= extLen + 4; // 4 = 2 bytes type + 2 bytes data length
        }
        return null;
    }
}
public class BugChecker { /** * Returns true if the given tree is annotated with a { @ code @ SuppressWarnings } that disables this * bug checker . */ public boolean isSuppressed ( Tree tree ) { } }
SuppressWarnings suppression = ASTHelpers . getAnnotation ( tree , SuppressWarnings . class ) ; return suppression != null && ! Collections . disjoint ( Arrays . asList ( suppression . value ( ) ) , allNames ( ) ) ;
public class MoreFunctions { /** * Creates function that converts inputs to uppercase using provided { @ code locale } * @ param locale to use for case conversion * @ return the function */ public static Function < String , String > toUpperCase ( @ Nonnull Locale locale ) { } }
return new ToUpperCaseFunction ( checkNotNull ( locale ) ) ;