signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LinearGapAlignmentScoring { /** * Returns standard amino acid BLAST scoring * @ param matrix BLAST substitution matrix * @ param gapPenalty penalty for gap , must be < 0 * @ return standard amino acid BLAST scoring */ public static LinearGapAlignmentScoring < AminoAcidSequence > getAminoAcidBLASTScoring ( BLASTMatrix matrix , int gapPenalty ) { } }
return new LinearGapAlignmentScoring < > ( AminoAcidSequence . ALPHABET , matrix . getMatrix ( ) , gapPenalty ) ;
public class ClassFile { /** * Return internal representation of given name , converting ' / ' to ' . ' . * Note : the naming is the inverse of that used by JVMS 4.2 The Internal Form Of Names , * which defines " internal name " to be the form using " / " instead of " . " */ public static byte [ ] internalize ( Name name ) { } }
return internalize ( name . getByteArray ( ) , name . getByteOffset ( ) , name . getByteLength ( ) ) ;
public class MolecularFormulaGenerator {

    /**
     * Checks if input parameters are valid and throws an IllegalArgumentException otherwise.
     *
     * Validates, in order: non-negative mass bounds, min <= max ordering, a non-null
     * non-empty isotope range, and that every isotope in the range has an exact mass set.
     * The {@code builder} parameter is not validated here.
     */
    protected void checkInputParameters(final IChemObjectBuilder builder, final double minMass,
            final double maxMass, final MolecularFormulaRange mfRange) {
        // Reject negative bounds before any ordering check.
        if ((minMass < 0.0) || (maxMass < 0.0)) {
            throw (new IllegalArgumentException("The minimum and maximum mass values must be >=0"));
        }
        if ((minMass > maxMass)) {
            throw (new IllegalArgumentException("Minimum mass must be <= maximum mass"));
        }
        // Short-circuit: the null check guards the getIsotopeCount() call.
        if ((mfRange == null) || (mfRange.getIsotopeCount() == 0)) {
            throw (new IllegalArgumentException(
                    "The MolecularFormulaRange parameter must be non-null and must contain at least one isotope"));
        }
        // Every isotope must carry an exact mass; no sorting happens here.
        for (IIsotope isotope : mfRange.isotopes()) {
            // Check if exact mass of each isotope is set
            if (isotope.getExactMass() == null)
                throw new IllegalArgumentException(
                        "The exact mass value of isotope " + isotope + " is not set");
        }
    }
}
public class CommerceShipmentUtil { /** * Returns the last commerce shipment in the ordered set where groupId = & # 63 ; and status = & # 63 ; . * @ param groupId the group ID * @ param status the status * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce shipment , or < code > null < / code > if a matching commerce shipment could not be found */ public static CommerceShipment fetchByG_S_Last ( long groupId , int status , OrderByComparator < CommerceShipment > orderByComparator ) { } }
return getPersistence ( ) . fetchByG_S_Last ( groupId , status , orderByComparator ) ;
public class FileSystem { /** * Opens an FSDataOutputStream at the indicated Path with write - progress * reporting . Same as create ( ) , except fails if parent directory doesn ' t * already exist . * @ param f the file name to open * @ param permission * @ param overwrite if a file with this name already exists , then if true , * the file will be overwritten , and if false an error will be thrown . * @ param bufferSize the size of the buffer to be used . * @ param replication required block replication for the file . * @ param blockSize * @ param progress * @ param forceSync * @ throws IOException * @ see # setPermission ( Path , FsPermission ) * @ deprecated API only for 0.20 - append */ @ Deprecated public FSDataOutputStream createNonRecursive ( Path f , FsPermission permission , boolean overwrite , int bufferSize , short replication , long blockSize , Progressable progress , boolean forceSync , boolean doParallelWrites , WriteOptions options ) throws IOException { } }
throw new IOException ( "createNonRecursive unsupported for this filesystem" + this . getClass ( ) ) ;
public class HttpPostBindingUtil {

    /**
     * Converts an {@link AggregatedHttpMessage} which is received from the remote entity to
     * a {@link SAMLObject}.
     *
     * Reads the named form parameter, base64(MIME)-decodes it, deserializes it into a
     * SAML message, and attaches any RelayState to the resulting message context.
     */
    static <T extends SAMLObject> MessageContext<T> toSamlObject(AggregatedHttpMessage msg, String name) {
        final SamlParameters parameters = new SamlParameters(msg);
        final byte[] decoded;
        try {
            // MIME decoder tolerates line breaks inside the base64 payload.
            decoded = Base64.getMimeDecoder().decode(parameters.getFirstValue(name));
        } catch (IllegalArgumentException e) {
            // Wrap with the failing parameter name; original cause preserved.
            throw new SamlException("failed to decode a base64 string of the parameter: " + name, e);
        }
        // Cast is unchecked by necessity: deserialize() returns the raw SAMLObject.
        @SuppressWarnings("unchecked")
        final T message = (T) deserialize(decoded);
        final MessageContext<T> messageContext = new MessageContext<>();
        messageContext.setMessage(message);
        // RelayState is optional; only create the binding subcontext when present.
        final String relayState = parameters.getFirstValueOrNull(RELAY_STATE);
        if (relayState != null) {
            // 'true' asks getSubcontext to auto-create the subcontext.
            final SAMLBindingContext context = messageContext.getSubcontext(SAMLBindingContext.class, true);
            assert context != null;
            context.setRelayState(relayState);
        }
        return messageContext;
    }
}
public class HierarchyGroupMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * Writes each HierarchyGroup field through the protocol marshaller using its
     * pre-defined binding; any failure is wrapped in an SdkClientException.
     */
    public void marshall(HierarchyGroup hierarchyGroup, ProtocolMarshaller protocolMarshaller) {
        if (hierarchyGroup == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(hierarchyGroup.getId(), ID_BINDING);
            protocolMarshaller.marshall(hierarchyGroup.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(hierarchyGroup.getName(), NAME_BINDING);
            protocolMarshaller.marshall(hierarchyGroup.getLevelId(), LEVELID_BINDING);
            protocolMarshaller.marshall(hierarchyGroup.getHierarchyPath(), HIERARCHYPATH_BINDING);
        } catch (Exception e) {
            // Broad catch is the SDK-generated convention: surface any marshalling
            // failure as a client-side exception with the original cause attached.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Kernel1D_F64 { /** * Creates a kernel whose elements are the specified data array and has * the specified width . * @ param data The array who will be the kernel ' s data . Reference is saved . * @ param width The kernel ' s width . * @ param offset Location of the origin in the array * @ return A new kernel . */ public static Kernel1D_F64 wrap ( double data [ ] , int width , int offset ) { } }
Kernel1D_F64 ret = new Kernel1D_F64 ( ) ; ret . data = data ; ret . width = width ; ret . offset = offset ; return ret ;
public class GenerationUtils { /** * Order Id for a initial request should be unique per client ID . This method generates a unique * order Id using the Java UUID class and then convert it to base64 to shorten the length to 22 * characters . Order Id for a subsequent request ( void , rebate , settle ect . ) should use the * order Id of the initial request . * * the order ID uses the Java UUID ( universally unique identifier ) so in theory it may not * be unique but the odds of this are extremely remote ( see * < a href = " http : / / en . wikipedia . org / wiki / Universally _ unique _ identifier # Random _ UUID _ probability _ of _ duplicates " > http : / / en . wikipedia . org / wiki / Universally _ unique _ identifier # Random _ UUID _ probability _ of _ duplicates < / a > ) * @ return orderId as a String */ static public String generateOrderId ( ) { } }
UUID uuid = UUID . randomUUID ( ) ; ByteBuffer bb = ByteBuffer . wrap ( new byte [ 16 ] ) ; bb . putLong ( uuid . getMostSignificantBits ( ) ) ; bb . putLong ( uuid . getLeastSignificantBits ( ) ) ; return Base64 . encodeBase64URLSafeString ( bb . array ( ) ) ;
public class DualCache { /** * Delete the corresponding object in cache . * @ param key is the key of the object . */ public void delete ( String key ) { } }
if ( ! ramMode . equals ( DualCacheRamMode . DISABLE ) ) { ramCacheLru . remove ( key ) ; } if ( ! diskMode . equals ( DualCacheDiskMode . DISABLE ) ) { try { dualCacheLock . lockDiskEntryWrite ( key ) ; diskLruCache . remove ( key ) ; } catch ( IOException e ) { logger . logError ( e ) ; } finally { dualCacheLock . unLockDiskEntryWrite ( key ) ; } }
public class AzkabanJobHelper { /** * Replace project on Azkaban based on Azkaban config . This includes preparing the zip file and uploading it to * Azkaban , setting permissions and schedule . * @ param sessionId Session Id . * @ param azkabanProjectId Project Id . * @ param azkabanProjectConfig Azkaban Project Config . * @ return Project Id . * @ throws IOException */ public static String replaceAzkabanJob ( String sessionId , String azkabanProjectId , AzkabanProjectConfig azkabanProjectConfig ) throws IOException { } }
log . info ( "Replacing zip for Azkaban project: " + azkabanProjectConfig . getAzkabanProjectName ( ) ) ; // Create zip file String zipFilePath = createAzkabanJobZip ( azkabanProjectConfig ) ; log . info ( "Zip file path: " + zipFilePath ) ; // Replace the zip file on Azkaban String projectId = AzkabanAjaxAPIClient . replaceAzkabanProject ( sessionId , zipFilePath , azkabanProjectConfig ) ; log . info ( "Project Id: " + projectId ) ; return projectId ;
public class MessageStoreUtils { /** * Build all items of the collection containing builders . The list must not * contain any null items . * @ param builders List of builders . * @ param < M > The message type . * @ param < F > The field type . * @ param < B > The builder type . * @ return List of messages or null if null input . */ public static < M extends PMessage < M , F > , F extends PField , B extends PMessageBuilder < M , F > > List < M > buildAll ( Collection < B > builders ) { } }
if ( builders == null ) { return null ; } return builders . stream ( ) . map ( PMessageBuilder :: build ) . collect ( Collectors . toList ( ) ) ;
public class NodeUtil {

    /**
     * Creates a node representing a qualified name.
     *
     * Splits the dotted name into components, makes the root a NAME (or THIS/SUPER
     * for the literal keywords), then wraps each remaining component in a GETPROP,
     * accumulating the source length as it goes.
     *
     * @param name A qualified name (e.g. "foo" or "foo.bar.baz")
     * @return A NAME or GETPROP node
     */
    public static Node newQName(AbstractCompiler compiler, String name) {
        int endPos = name.indexOf('.');
        // No dot at all: the whole string is a simple name.
        if (endPos == -1) {
            return newName(compiler, name);
        }
        Node node;
        String nodeName = name.substring(0, endPos);
        // "this" and "super" roots get dedicated node kinds, not NAME nodes.
        if ("this".equals(nodeName)) {
            node = IR.thisNode();
        } else if ("super".equals(nodeName)) {
            node = IR.superNode();
        } else {
            node = newName(compiler, nodeName);
        }
        int startPos;
        do {
            // Advance past the dot we just consumed and find the next one.
            startPos = endPos + 1;
            endPos = name.indexOf('.', startPos);
            String part = (endPos == -1
                    ? name.substring(startPos)
                    : name.substring(startPos, endPos));
            Node propNode = IR.string(part);
            propNode.setLength(part.length());
            // Coding-convention constants (e.g. ALL_CAPS) are flagged on the prop.
            if (compiler.getCodingConvention().isConstantKey(part)) {
                propNode.putBooleanProp(Node.IS_CONSTANT_NAME, true);
            }
            // Length of the GETPROP spans receiver + "." + property text.
            int length = node.getLength() + ".".length() + part.length();
            node = IR.getprop(node, propNode);
            node.setLength(length);
        } while (endPos != -1);
        return node;
    }
}
public class CompactingHashTable { /** * Size of all memory segments owned by the partitions of this hash table excluding the compaction partition * @ return size in bytes */ private long getPartitionSize ( ) { } }
long numSegments = 0 ; for ( InMemoryPartition < T > p : this . partitions ) { numSegments += p . getBlockCount ( ) ; } return numSegments * this . segmentSize ;
public class Maps { /** * Map转换为XML * @ param params Map参数 * @ return XML字符串 */ public static String toXml ( final Map < String , String > params ) { } }
XmlWriters writers = XmlWriters . create ( ) ; for ( Map . Entry < String , String > param : params . entrySet ( ) ) { if ( ! Strings . isNullOrEmpty ( param . getValue ( ) ) ) { writers . element ( param . getKey ( ) , param . getValue ( ) ) ; } } return writers . build ( ) ;
public class CmsSiteBean {

    /**
     * Creates a new site object based on the members.<p>
     *
     * NOTE(review): this method mutates {@code m_siteRoot} (strips a trailing
     * slash) as a side effect of being called — confirm callers expect that.
     *
     * @return a new site object based on the members
     */
    public CmsSite toCmsSite() {
        // Normalize the site root: drop a single trailing "/" if present.
        m_siteRoot = m_siteRoot.endsWith("/")
            ? m_siteRoot.substring(0, m_siteRoot.length() - 1)
            : m_siteRoot;
        // Secure-URL matcher is optional.
        CmsSiteMatcher matcher = CmsStringUtil.isNotEmpty(m_secureUrl) ? new CmsSiteMatcher(m_secureUrl) : null;
        CmsSite site = OpenCms.getSiteManager().getSiteForSiteRoot(m_siteRoot);
        // Reuse the existing site's UUID when one is already registered for this root.
        CmsUUID uuid = new CmsUUID();
        if ((site != null) && (site.getSiteMatcher() != null)) {
            uuid = (CmsUUID)site.getSiteRootUUID().clone();
        }
        String errorPage = CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_errorPage) ? m_errorPage : null;
        // Build alias matchers, flagging the ones configured as redirects.
        List<CmsSiteMatcher> aliases = new ArrayList<CmsSiteMatcher>();
        for (String alias : m_aliases) {
            CmsSiteMatcher aliasMatcher = new CmsSiteMatcher(alias);
            aliasMatcher.setRedirect(m_redirectAliases.contains(alias));
            aliases.add(aliasMatcher);
        }
        CmsSite result = new CmsSite(
            m_siteRoot,
            uuid,
            m_title,
            new CmsSiteMatcher(m_server),
            String.valueOf(m_position),
            errorPage,
            matcher,
            m_exclusiveUrl,
            m_exclusiveError,
            m_webserver,
            aliases);
        result.setParameters(m_parameters);
        // An unparsable SSL mode falls back to NO (and is logged).
        try {
            result.setSSLMode(CmsSSLMode.valueOf(m_mode));
        } catch (Exception e) {
            result.setSSLMode(CmsSSLMode.NO);
            LOG.error(e.getLocalizedMessage(), e);
        }
        return result;
    }
}
public class MgmtRestModelMapper { /** * Converts the given repository { @ link ActionType } into a corresponding * { @ link MgmtActionType } . * @ param actionType * the repository representation of the action type * @ return < null > or the REST action type */ public static MgmtActionType convertActionType ( final ActionType actionType ) { } }
if ( actionType == null ) { return null ; } switch ( actionType ) { case SOFT : return MgmtActionType . SOFT ; case FORCED : return MgmtActionType . FORCED ; case TIMEFORCED : return MgmtActionType . TIMEFORCED ; case DOWNLOAD_ONLY : return MgmtActionType . DOWNLOAD_ONLY ; default : throw new IllegalStateException ( "Action Type is not supported" ) ; }
public class AbstractRoller { /** * ( non - Javadoc ) * @ see * org . apache . log4j . appender . FileRollEventSource # fireFileRollEvent ( org . apache * . log4j . appender . FileRollEvent ) */ public final void fireFileRollEvent ( final FileRollEvent fileRollEvent ) { } }
final Object [ ] listeners = this . fileRollEventListeners . toArray ( ) ; for ( int i = 0 ; i < listeners . length ; i ++ ) { final FileRollEventListener listener = ( FileRollEventListener ) listeners [ i ] ; listener . onFileRoll ( fileRollEvent ) ; }
public class LocalDateTime { /** * Get the value of one of the fields of a datetime . * This method gets the value of the specified field . * For example : * < pre > * DateTime dt = new DateTime ( ) ; * int year = dt . get ( DateTimeFieldType . year ( ) ) ; * < / pre > * @ param type a field type , usually obtained from DateTimeFieldType , not null * @ return the value of that field * @ throws IllegalArgumentException if the field type is null */ public int get ( DateTimeFieldType type ) { } }
if ( type == null ) { throw new IllegalArgumentException ( "The DateTimeFieldType must not be null" ) ; } return type . getField ( getChronology ( ) ) . get ( getLocalMillis ( ) ) ;
public class BeansDescriptorImpl { /** * If not already created , a new < code > decorators < / code > element with the given value will be created . * Otherwise , the existing < code > decorators < / code > element will be returned . * @ return a new or existing instance of < code > Decorators < BeansDescriptor > < / code > */ public Decorators < BeansDescriptor > getOrCreateDecorators ( ) { } }
Node node = model . getOrCreate ( "decorators" ) ; Decorators < BeansDescriptor > decorators = new DecoratorsImpl < BeansDescriptor > ( this , "decorators" , model , node ) ; return decorators ;
public class Eval {

    /**
     * Converts the user source of a snippet into a Snippet list -- Snippet will
     * have wrappers.
     *
     * Any snippet that does not yet carry an outer wrap gets one: imports are
     * wrapped via the import wrapper, everything else in a trial class.
     *
     * @param userSource the source of the snippet
     * @return usually a singleton list of Snippet, but may be empty or multiple
     */
    List<Snippet> sourceToSnippetsWithWrappers(String userSource) {
        List<Snippet> snippets = sourceToSnippets(userSource);
        for (Snippet snip : snippets) {
            // Only wrap snippets that are not already wrapped.
            if (snip.outerWrap() == null) {
                snip.setOuterWrap(
                        (snip.kind() == Kind.IMPORT)
                                ? state.outerMap.wrapImport(snip.guts(), snip)
                                : state.outerMap.wrapInTrialClass(snip.guts()));
            }
        }
        return snippets;
    }
}
public class IcsAbsSpinner {

    /**
     * The Adapter is used to provide the data which backs this Spinner.
     * It also provides methods to transform spinner items based on their position
     * relative to the selected item.
     *
     * Detaches the observer from any previous adapter, resets selection state,
     * registers against the new adapter (if non-null), and requests a layout.
     *
     * @param adapter The SpinnerAdapter to use for this Spinner
     */
    @Override
    public void setAdapter(SpinnerAdapter adapter) {
        // Unhook from the old adapter before replacing it.
        if (null != mAdapter) {
            mAdapter.unregisterDataSetObserver(mDataSetObserver);
            resetList();
        }

        mAdapter = adapter;

        // Old selection is invalidated by the adapter swap.
        mOldSelectedPosition = INVALID_POSITION;
        mOldSelectedRowId = INVALID_ROW_ID;

        if (mAdapter != null) {
            mOldItemCount = mItemCount;
            mItemCount = mAdapter.getCount();
            checkFocus();

            mDataSetObserver = new AdapterDataSetObserver();
            mAdapter.registerDataSetObserver(mDataSetObserver);

            // Select the first item when there is data, otherwise nothing.
            int position = mItemCount > 0 ? 0 : INVALID_POSITION;

            setSelectedPositionInt(position);
            setNextSelectedPositionInt(position);

            if (mItemCount == 0) {
                // Nothing selected
                checkSelectionChanged();
            }
        } else {
            checkFocus();
            resetList();
            // Nothing selected
            checkSelectionChanged();
        }

        requestLayout();
    }
}
public class PrivateKeyReader { /** * Read the private key from a pem file as base64 encoded { @ link String } value . * @ param file * the file ( in * . pem format ) that contains the private key * @ return the base64 encoded { @ link String } value . * @ throws IOException * Signals that an I / O exception has occurred . */ public static String readPemFileAsBase64 ( final File file ) throws IOException { } }
final byte [ ] keyBytes = Files . readAllBytes ( file . toPath ( ) ) ; final String privateKeyPem = new String ( keyBytes ) ; String privateKeyAsBase64String = null ; if ( privateKeyPem . indexOf ( BEGIN_PRIVATE_KEY_PREFIX ) != - 1 ) { // PKCS # 8 format privateKeyAsBase64String = new String ( keyBytes ) . replace ( BEGIN_PRIVATE_KEY_PREFIX , "" ) . replace ( END_PRIVATE_KEY_SUFFIX , "" ) . trim ( ) ; } if ( privateKeyPem . indexOf ( BEGIN_RSA_PRIVATE_KEY_PREFIX ) != - 1 ) { // PKCS # 1 format privateKeyAsBase64String = new String ( keyBytes ) . replace ( BEGIN_RSA_PRIVATE_KEY_PREFIX , "" ) . replace ( END_RSA_PRIVATE_KEY_SUFFIX , "" ) . trim ( ) ; } return privateKeyAsBase64String ;
public class IDivOpAxis {

    /**
     * {@inheritDoc}
     *
     * Resolves both operands to their primitive base types; integer division is
     * only defined for numeric operands and always yields INTEGER. Any type
     * resolution failure or non-numeric operand raises XPTY0004.
     */
    @Override
    protected Type getReturnType(final int mOp1, final int mOp2) throws TTXPathException {
        Type type1;
        Type type2;
        try {
            type1 = Type.getType(mOp1).getPrimitiveBaseType();
            type2 = Type.getType(mOp2).getPrimitiveBaseType();
        } catch (final IllegalStateException e) {
            // Unknown/unresolvable operand type maps to the XPath type error.
            throw new XPathError(ErrorType.XPTY0004);
        }
        if (type1.isNumericType() && type2.isNumericType()) {
            // idiv always produces an integer, regardless of operand numeric kind.
            return Type.INTEGER;
        } else {
            throw new XPathError(ErrorType.XPTY0004);
        }
    }
}
public class Fetch { /** * For PUT / POST . */ protected String perform ( String method , String request ) throws IOException { } }
HttpURLConnection connection = ( HttpURLConnection ) from . openConnection ( ) ; connection . setRequestMethod ( method ) ; connection . setDoOutput ( true ) ; if ( contentType == null ) contentType = "text/plain; charset=utf8" ; connection . setRequestProperty ( "Content-Type" , contentType ) ; try ( OutputStream urlOut = connection . getOutputStream ( ) ) { urlOut . write ( request . getBytes ( ) ) ; urlOut . flush ( ) ; InputStream urlIn = connection . getInputStream ( ) ; ByteArrayOutputStream resp = new ByteArrayOutputStream ( ) ; byte [ ] buffer = new byte [ 1024 ] ; int len = urlIn . read ( buffer ) ; while ( len >= 0 ) { resp . write ( buffer , 0 , len ) ; len = urlIn . read ( buffer ) ; } return resp . toString ( ) ; }
public class ZipUtil {

    /**
     * Compresses the given entries into a new ZIP file.
     *
     * Opens a buffered stream over the target file and delegates to the
     * stream-based pack; IOExceptions are rethrown through the project's
     * exception utility, and the stream is always closed quietly.
     *
     * @param entries ZIP entries added.
     * @param zip new ZIP file created.
     */
    public static void pack(ZipEntrySource[] entries, File zip) {
        if (log.isDebugEnabled()) {
            log.debug("Creating '{}' from {}.", zip, Arrays.asList(entries));
        }
        OutputStream out = null;
        try {
            out = new BufferedOutputStream(new FileOutputStream(zip));
            // The 'true' flag is passed through to the stream-based pack;
            // closeQuietly below remains as a safety net either way.
            pack(entries, out, true);
        } catch (IOException e) {
            throw ZipExceptionUtil.rethrow(e);
        } finally {
            IOUtils.closeQuietly(out);
        }
    }
}
public class ServiceQueue {

    /**
     * Gets session access token.
     *
     * NOTE(review): session() is called twice — once for the null check and once
     * to read the token. If the DAO can return a different (or null) session on
     * the second call, this can NPE or return a stale token; consider caching
     * the session in a local. Left as-is pending confirmation of session()'s
     * return type.
     *
     * @return Session access token, or {@code null} when there is no session.
     */
    protected String getToken() {
        return dataMgr.getSessionDAO().session() != null
                ? dataMgr.getSessionDAO().session().getAccessToken()
                : null;
    }
}
public class EmbeddedProcessFactory { /** * Create an embedded standalone server with an already established module loader . * @ param moduleLoader the module loader . Cannot be { @ code null } * @ param jbossHomeDir the location of the root of server installation . Cannot be { @ code null } or empty . * @ param cmdargs any additional arguments to pass to the embedded server ( e . g . - b = 192.168.100.10) * @ return the running embedded server . Will not be { @ code null } */ public static StandaloneServer createStandaloneServer ( ModuleLoader moduleLoader , File jbossHomeDir , String ... cmdargs ) { } }
return createStandaloneServer ( Configuration . Builder . of ( jbossHomeDir ) . setCommandArguments ( cmdargs ) . setModuleLoader ( moduleLoader ) . build ( ) ) ;
public class BundledTileSetRepository { /** * documentation inherited from interface */ public int getTileSetId ( String setName ) throws NoSuchTileSetException , PersistenceException { } }
waitForBundles ( ) ; Integer tsid = _namemap . get ( setName ) ; if ( tsid != null ) { return tsid . intValue ( ) ; } throw new NoSuchTileSetException ( setName ) ;
public class ntp_server {

    /**
     * <pre>
     * Converts API response of bulk operation into object and returns the object array in case of get request.
     * </pre>
     *
     * Parses the raw response into the bulk wrapper type; non-zero error codes
     * raise a nitro_exception (clearing the session first when it has expired),
     * otherwise the first ntp_server of each per-item response is collected.
     */
    protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception {
        ntp_server_responses result = (ntp_server_responses) service.get_payload_formatter()
                .string_to_resource(ntp_server_responses.class, response);
        if (result.errorcode != 0) {
            // An expired session must be cleared before surfacing the error.
            if (result.errorcode == SESSION_NOT_EXISTS)
                service.clear_session();
            throw new nitro_exception(result.message, result.errorcode,
                    (base_response[]) result.ntp_server_response_array);
        }
        // Flatten: each bulk item carries exactly one ntp_server at index 0.
        ntp_server[] result_ntp_server = new ntp_server[result.ntp_server_response_array.length];
        for (int i = 0; i < result.ntp_server_response_array.length; i++) {
            result_ntp_server[i] = result.ntp_server_response_array[i].ntp_server[0];
        }
        return result_ntp_server;
    }
}
public class QuadTreeImpl {

    /**
     * Inserts the element and bounding_box into the Quad_tree_impl.
     * Note that this will invalidate any active iterator on the Quad_tree_impl.
     * Returns an Element_handle corresponding to the element and bounding_box.
     *
     * \param element The element of the Geometry to be inserted.
     * \param bounding_box The bounding_box of the Geometry to be inserted.
     */
    int insert(int element, Envelope2D bounding_box) {
        // Lazily create the root on first insertion.
        if (m_root == -1)
            create_root_();

        if (m_b_store_duplicates) {
            int success = insert_duplicates_(element, bounding_box, 0, m_extent, m_root, false, -1);

            // On success (-1 means failure), grow the tracked data extent.
            if (success != -1) {
                if (m_data_extent.isEmpty())
                    m_data_extent.setCoords(bounding_box);
                else
                    m_data_extent.merge(bounding_box);
            }

            return success;
        }

        int element_handle = insert_(element, bounding_box, 0, m_extent, m_root, false, -1);

        // Same extent bookkeeping for the non-duplicate path.
        if (element_handle != -1) {
            if (m_data_extent.isEmpty())
                m_data_extent.setCoords(bounding_box);
            else
                m_data_extent.merge(bounding_box);
        }

        return element_handle;
    }
}
public class CommerceTierPriceEntryUtil { /** * Returns the commerce tier price entry where companyId = & # 63 ; and externalReferenceCode = & # 63 ; or returns < code > null < / code > if it could not be found , optionally using the finder cache . * @ param companyId the company ID * @ param externalReferenceCode the external reference code * @ param retrieveFromCache whether to retrieve from the finder cache * @ return the matching commerce tier price entry , or < code > null < / code > if a matching commerce tier price entry could not be found */ public static CommerceTierPriceEntry fetchByC_ERC ( long companyId , String externalReferenceCode , boolean retrieveFromCache ) { } }
return getPersistence ( ) . fetchByC_ERC ( companyId , externalReferenceCode , retrieveFromCache ) ;
public class Covers { /** * Returns an iterator for the sequences of a transition cover . Sequences are computed lazily ( i . e . as requested by * the iterators { @ link Iterator # next ( ) next } method . * @ param automaton * the automaton for which the cover should be computed * @ param inputs * the set of input symbols allowed in the cover sequences * @ param < I > * input symbol type * @ return an iterator for the input sequences of the cover . * @ see # transitionCover ( DeterministicAutomaton , Collection , Collection ) */ public static < I > Iterator < Word < I > > transitionCoverIterator ( DeterministicAutomaton < ? , I , ? > automaton , Collection < ? extends I > inputs ) { } }
return new TransitionCoverIterator < > ( automaton , inputs ) ;
public class PauseableComponentControllerImpl {

    /**
     * (non-Javadoc)
     *
     * Pauses the named pauseable components. Invalid target lists fail fast;
     * components that throw during pause are collected and reported together;
     * targets that match no component produce a "missing targets" failure.
     *
     * @see com.ibm.ws.kernel.launch.service.PauseableComponentController#pause(java.lang.String)
     */
    @Override
    public void pause(String targets) throws PauseableComponentControllerRequestFailedException {
        Tr.info(tc, "info.server.pause.request.received", targets);

        Set<String> foundTargets = new HashSet<String>();
        Set<String> targetList = createTargetList(targets);

        // An empty/invalid target list is rejected before touching any component.
        if (targetList.isEmpty()) {
            Tr.warning(tc, "warning.server.pause.invalid.targets");
            throw new PauseableComponentControllerRequestFailedException(
                    Tr.formatMessage(tc, "warning.server.pause.invalid.targets"));
        }

        Set<String> failed = new HashSet<String>();

        // Add each pauseable component to this list. If the tracked values get modified
        // while we are iterating and we start over, skip anyone already in this list
        Set<PauseableComponent> processedList = new HashSet<PauseableComponent>();

        // Sync with other methods changing/querying states for PauseableComponents
        synchronized (this) {
            // Retry loop: iteration restarts from scratch if the tracked set is
            // concurrently modified; processedList prevents double-pausing.
            while (true) {
                try {
                    for (PauseableComponent pauseableComponent : tracker.getTracked().values()) {
                        if (processedList.add(pauseableComponent)) {
                            if (targetList.contains(pauseableComponent.getName())) {
                                foundTargets.add(pauseableComponent.getName());
                                try {
                                    pauseableComponent.pause();
                                } catch (Throwable t) {
                                    // Catch anything and mark a failed Add it to the failed list.
                                    failed.add(pauseableComponent.getName());
                                }
                            }
                        }
                    }
                    break;
                } catch (Throwable t) {
                    // Someone modified our list of services. Retry.
                }
            }
        }

        // Check which (if any) targets were not found
        boolean targetsNotFound = false;
        targetList.removeAll(foundTargets);
        if (!targetList.isEmpty()) {
            targetsNotFound = true;
            Tr.warning(tc, "warning.server.pause.missing.targets", Arrays.toString(targetList.toArray()));
        }

        // Check if we had any failures and throw an exception back with a list of failed pauseable components.
        if (!failed.isEmpty()) {
            Tr.error(tc, "error.server.pause.failed", Arrays.toString(failed.toArray()));
            throw new PauseableComponentControllerRequestFailedException(
                    Tr.formatMessage(tc, "error.server.pause.failed", Arrays.toString(failed.toArray())));
        } else {
            Tr.info(tc, "info.server.pause.request.completed");
            // Missing targets only surface as a failure when no pause itself failed.
            if (targetsNotFound) {
                throw new PauseableComponentControllerRequestFailedException(
                        Tr.formatMessage(tc, "warning.server.pause.missing.targets",
                                Arrays.toString(targetList.toArray())));
            }
        }
    }
}
public class ScriptContextEngineView { /** * Put the bindings into the ENGINE _ SCOPE of the context . * @ param t Mappings to be stored in this map . * @ throws UnsupportedOperationException if the < tt > putAll < / tt > method is not * supported by this map . * @ throws ClassCastException if the class of a key or value in the specified * map prevents it from being stored in this map . * @ throws IllegalArgumentException some aspect of a key or value in the * specified map prevents it from being stored in this map . * @ throws NullPointerException if the specified map is < tt > null < / tt > , or if * this map does not permit < tt > null < / tt > keys or values , and the specified map * contains < tt > null < / tt > keys or values . */ @ Override public void putAll ( Map < ? extends String , ? extends Object > t ) { } }
context . getBindings ( ENGINE_SCOPE ) . putAll ( t ) ;
public class KeyVaultClientBaseImpl {

    /**
     * Retrieves a list of individual key versions with the same key name.
     * The full key identifier, attributes, and tags are provided in the response.
     * This operation requires the keys/list permission.
     *
     * Recursively chains page fetches: each emitted page is followed by the
     * observable for its next-page link until that link is null.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;KeyItem&gt; object
     */
    public Observable<ServiceResponse<Page<KeyItem>>> getKeyVersionsNextWithServiceResponseAsync(
            final String nextPageLink) {
        return getKeyVersionsNextSinglePageAsync(nextPageLink)
                .concatMap(new Func1<ServiceResponse<Page<KeyItem>>, Observable<ServiceResponse<Page<KeyItem>>>>() {
                    @Override
                    public Observable<ServiceResponse<Page<KeyItem>>> call(ServiceResponse<Page<KeyItem>> page) {
                        // Shadows the outer parameter intentionally: this is the
                        // link of the page just received, not the method argument.
                        String nextPageLink = page.body().nextPageLink();
                        if (nextPageLink == null) {
                            // Last page: terminate the chain.
                            return Observable.just(page);
                        }
                        // Emit this page, then recurse for the remaining pages.
                        return Observable.just(page)
                                .concatWith(getKeyVersionsNextWithServiceResponseAsync(nextPageLink));
                    }
                });
    }
}
public class GroupImpl {

    /**
     * <!-- begin-user-doc -->
     * EMF-generated reflective getter: resolves known feature IDs locally and
     * defers everything else to the superclass. Do not hand-edit the switch.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case XtextPackage.GROUP__GUARD_CONDITION:
                return getGuardCondition();
        }
        return super.eGet(featureID, resolve, coreType);
    }
}
public class JarWriter { /** * Writes an entry . The { @ code inputStream } is closed once the entry has been written * @ param entryName the name of the entry * @ param inputStream the stream from which the entry ' s data can be read * @ throws IOException if the write fails */ @ Override public void writeEntry ( String entryName , InputStream inputStream ) throws IOException { } }
JarArchiveEntry entry = new JarArchiveEntry ( entryName ) ; writeEntry ( entry , new InputStreamEntryWriter ( inputStream , true ) ) ;
public class FP64 { /** * Extends this fingerprint by the bytes * < code > bytes [ offset ] . . bytes [ offset + length - 1 ] < / code > . * @ return * the resulting fingerprint . */ public FP64 extend ( byte [ ] bytes , int start , int len ) { } }
int end = start + len ; for ( int i = start ; i < end ; i ++ ) { extend ( bytes [ i ] ) ; } return this ;
public class GrammaticalRelation {

    /**
     * Returns <code>true</code> iff the value of <code>Tree</code> node
     * <code>t</code> matches the <code>sourcePattern</code> for this
     * <code>GrammaticalRelation</code>, indicating that this
     * <code>GrammaticalRelation</code> is one that could hold between
     * <code>Tree</code> node <code>t</code> and some other node.
     */
    public boolean isApplicable(Tree t) {
        // Guard: no pattern, or no node value, can never match.
        if (sourcePattern == null || t.value() == null) {
            return false;
        }
        return sourcePattern.matcher(t.value()).matches();
    }
}
public class CpcSketch { /** * Present the given String as a potential unique item . * The string is converted to a byte array using UTF8 encoding . * If the string is null or empty no update attempt is made and the method returns . * < p > Note : About 2X faster performance can be obtained by first converting the String to a * char [ ] and updating the sketch with that . This bypasses the complexity of the Java UTF _ 8 * encoding . This , of course , will not produce the same internal hash values as updating directly * with a String . So be consistent ! Unioning two sketches , one fed with strings and the other * fed with char [ ] will be meaningless . * @ param datum The given String . */ public void update ( final String datum ) { } }
if ( ( datum == null ) || datum . isEmpty ( ) ) { return ; } final byte [ ] data = datum . getBytes ( UTF_8 ) ; final long [ ] arr = hash ( data , seed ) ; hashUpdate ( arr [ 0 ] , arr [ 1 ] ) ;
public class BigDecimalUtil { /** * Calculate the weight of the constituent and add it to the running weighted value . * runningWeightedVal + valueToAdd * weightForValueToAdd / totalWeight * @ param runningWeightedVal * @ param valueToAdd * @ param weightForValueToAdd * @ param totalWeight * @ return */ public static BigDecimal addWeightedConstituent ( final BigDecimal runningWeightedVal , final BigDecimal valueToAdd , final BigDecimal weightForValueToAdd , final BigDecimal totalWeight ) { } }
return BigDecimalUtil . doAdd ( runningWeightedVal , BigDecimalUtil . divide ( BigDecimalUtil . multiply ( valueToAdd , BigDecimalUtil . abs ( weightForValueToAdd ) ) , BigDecimalUtil . abs ( totalWeight ) , BigDecimal . ROUND_HALF_UP ) ) ;
public class InternalXbaseParser {
    /**
     * Parses an XSetLiteral: {@code '#' '{' (XExpression (',' XExpression)*)? '}'}.
     * Generated ANTLR rule - do not edit the control flow by hand.
     *
     * InternalXbase.g:2317:1: ruleXSetLiteral returns [EObject current=null] :
     *     ( () otherlv_1='#' otherlv_2='{'
     *       ( ((lv_elements_3_0=ruleXExpression)) (otherlv_4=',' ((lv_elements_5_0=ruleXExpression)))* )?
     *       otherlv_6='}' ) ;
     */
    public final EObject ruleXSetLiteral() throws RecognitionException {
        EObject current = null;

        Token otherlv_1 = null;
        Token otherlv_2 = null;
        Token otherlv_4 = null;
        Token otherlv_6 = null;
        EObject lv_elements_3_0 = null;
        EObject lv_elements_5_0 = null;

        enterRule();

        try {
            // InternalXbase.g:2323:2: ( ( () otherlv_1='#' otherlv_2='{' ( ((lv_elements_3_0=ruleXExpression)) (otherlv_4=',' ((lv_elements_5_0=ruleXExpression)))* )? otherlv_6='}' ) )
            // InternalXbase.g:2324:2: ( () otherlv_1='#' otherlv_2='{' ( ((lv_elements_3_0=ruleXExpression)) (otherlv_4=',' ((lv_elements_5_0=ruleXExpression)))* )? otherlv_6='}' )
            {
                // InternalXbase.g:2324:2: ( () otherlv_1='#' otherlv_2='{' ( ((lv_elements_3_0=ruleXExpression)) (otherlv_4=',' ((lv_elements_5_0=ruleXExpression)))* )? otherlv_6='}' )
                // InternalXbase.g:2325:3: () otherlv_1='#' otherlv_2='{' ( ((lv_elements_3_0=ruleXExpression)) (otherlv_4=',' ((lv_elements_5_0=ruleXExpression)))* )? otherlv_6='}'
                {
                    // InternalXbase.g:2325:3: ()
                    // InternalXbase.g:2326:4:
                    {
                        if (state.backtracking == 0) {
                            // Eagerly create the XSetLiteral model element so it exists even for an empty '#{}'.
                            current = forceCreateModelElement(
                                    grammarAccess.getXSetLiteralAccess().getXSetLiteralAction_0(),
                                    current);
                        }
                    }

                    // Token 51 is the '#' keyword.
                    otherlv_1 = (Token) match(input, 51, FOLLOW_32);
                    if (state.failed) return current;
                    if (state.backtracking == 0) {
                        newLeafNode(otherlv_1, grammarAccess.getXSetLiteralAccess().getNumberSignKeyword_1());
                    }
                    // Token 52 is the '{' keyword.
                    otherlv_2 = (Token) match(input, 52, FOLLOW_33);
                    if (state.failed) return current;
                    if (state.backtracking == 0) {
                        newLeafNode(otherlv_2, grammarAccess.getXSetLiteralAccess().getLeftCurlyBracketKeyword_2());
                    }

                    // InternalXbase.g:2340:3: ( ((lv_elements_3_0=ruleXExpression)) (otherlv_4=',' ((lv_elements_5_0=ruleXExpression)))* )?
                    // Lookahead decision: does an XExpression start here (non-empty literal)?
                    int alt37 = 2;
                    int LA37_0 = input.LA(1);
                    if (((LA37_0 >= RULE_STRING && LA37_0 <= RULE_ID) || LA37_0 == 19
                            || (LA37_0 >= 35 && LA37_0 <= 36) || LA37_0 == 41 || LA37_0 == 49
                            || (LA37_0 >= 51 && LA37_0 <= 52) || LA37_0 == 54 || LA37_0 == 58
                            || LA37_0 == 60 || (LA37_0 >= 64 && LA37_0 <= 66)
                            || (LA37_0 >= 69 && LA37_0 <= 81) || LA37_0 == 83)) {
                        alt37 = 1;
                    }
                    switch (alt37) {
                        case 1:
                            // InternalXbase.g:2341:4: ((lv_elements_3_0=ruleXExpression)) (otherlv_4=',' ((lv_elements_5_0=ruleXExpression)))*
                            {
                                // InternalXbase.g:2341:4: ((lv_elements_3_0=ruleXExpression))
                                // InternalXbase.g:2342:5: (lv_elements_3_0=ruleXExpression)
                                {
                                    // InternalXbase.g:2342:5: (lv_elements_3_0=ruleXExpression)
                                    // InternalXbase.g:2343:6: lv_elements_3_0=ruleXExpression
                                    {
                                        if (state.backtracking == 0) {
                                            newCompositeNode(grammarAccess.getXSetLiteralAccess().getElementsXExpressionParserRuleCall_3_0_0());
                                        }
                                        pushFollow(FOLLOW_34);
                                        // Parse the first element of the set literal.
                                        lv_elements_3_0 = ruleXExpression();
                                        state._fsp--;
                                        if (state.failed) return current;
                                        if (state.backtracking == 0) {
                                            if (current == null) {
                                                current = createModelElementForParent(grammarAccess.getXSetLiteralRule());
                                            }
                                            add(current, "elements", lv_elements_3_0, "org.eclipse.xtext.xbase.Xbase.XExpression");
                                            afterParserOrEnumRuleCall();
                                        }
                                    }
                                }

                                // InternalXbase.g:2360:4: (otherlv_4=',' ((lv_elements_5_0=ruleXExpression)))*
                                // Zero or more ',' XExpression pairs (token 48 is ',').
                                loop36:
                                do {
                                    int alt36 = 2;
                                    int LA36_0 = input.LA(1);
                                    if ((LA36_0 == 48)) {
                                        alt36 = 1;
                                    }
                                    switch (alt36) {
                                        case 1:
                                            // InternalXbase.g:2361:5: otherlv_4=',' ((lv_elements_5_0=ruleXExpression))
                                            {
                                                otherlv_4 = (Token) match(input, 48, FOLLOW_4);
                                                if (state.failed) return current;
                                                if (state.backtracking == 0) {
                                                    newLeafNode(otherlv_4, grammarAccess.getXSetLiteralAccess().getCommaKeyword_3_1_0());
                                                }
                                                // InternalXbase.g:2365:5: ((lv_elements_5_0=ruleXExpression))
                                                // InternalXbase.g:2366:6: (lv_elements_5_0=ruleXExpression)
                                                {
                                                    // InternalXbase.g:2366:6: (lv_elements_5_0=ruleXExpression)
                                                    // InternalXbase.g:2367:7: lv_elements_5_0=ruleXExpression
                                                    {
                                                        if (state.backtracking == 0) {
                                                            newCompositeNode(grammarAccess.getXSetLiteralAccess().getElementsXExpressionParserRuleCall_3_1_1_0());
                                                        }
                                                        pushFollow(FOLLOW_34);
                                                        // Parse the next element after the comma.
                                                        lv_elements_5_0 = ruleXExpression();
                                                        state._fsp--;
                                                        if (state.failed) return current;
                                                        if (state.backtracking == 0) {
                                                            if (current == null) {
                                                                current = createModelElementForParent(grammarAccess.getXSetLiteralRule());
                                                            }
                                                            add(current, "elements", lv_elements_5_0, "org.eclipse.xtext.xbase.Xbase.XExpression");
                                                            afterParserOrEnumRuleCall();
                                                        }
                                                    }
                                                }
                                            }
                                            break;
                                        default:
                                            // No further ',' - leave the element loop.
                                            break loop36;
                                    }
                                } while (true);
                            }
                            break;
                    }

                    // Token 53 is the closing '}' keyword.
                    otherlv_6 = (Token) match(input, 53, FOLLOW_2);
                    if (state.failed) return current;
                    if (state.backtracking == 0) {
                        newLeafNode(otherlv_6, grammarAccess.getXSetLiteralAccess().getRightCurlyBracketKeyword_4());
                    }
                }
            }

            if (state.backtracking == 0) {
                leaveRule();
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class GeometryColumnsSfSqlDao { /** * { @ inheritDoc } * Update using the complex key */ @ Override public int update ( GeometryColumnsSfSql geometryColumns ) throws SQLException { } }
UpdateBuilder < GeometryColumnsSfSql , TableColumnKey > ub = updateBuilder ( ) ; ub . updateColumnValue ( GeometryColumnsSfSql . COLUMN_GEOMETRY_TYPE , geometryColumns . getGeometryTypeCode ( ) ) ; ub . updateColumnValue ( GeometryColumnsSfSql . COLUMN_COORD_DIMENSION , geometryColumns . getCoordDimension ( ) ) ; ub . updateColumnValue ( GeometryColumnsSfSql . COLUMN_SRID , geometryColumns . getSrid ( ) ) ; ub . where ( ) . eq ( GeometryColumnsSfSql . COLUMN_F_TABLE_NAME , geometryColumns . getFTableName ( ) ) . and ( ) . eq ( GeometryColumnsSfSql . COLUMN_F_GEOMETRY_COLUMN , geometryColumns . getFGeometryColumn ( ) ) ; PreparedUpdate < GeometryColumnsSfSql > update = ub . prepare ( ) ; int updated = update ( update ) ; return updated ;
public class PropertiesManagerCore { /** * Add a property value to all GeoPackages * @ param property * property name * @ param value * value * @ return number of GeoPackages added to */ public int addValue ( String property , String value ) { } }
int count = 0 ; for ( String geoPackage : propertiesMap . keySet ( ) ) { if ( addValue ( geoPackage , property , value ) ) { count ++ ; } } return count ;
public class PutNotificationConfigurationRequest { /** * The type of event that causes the notification to be sent . For more information about notification types * supported by Amazon EC2 Auto Scaling , see < a > DescribeAutoScalingNotificationTypes < / a > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setNotificationTypes ( java . util . Collection ) } or { @ link # withNotificationTypes ( java . util . Collection ) } if * you want to override the existing values . * @ param notificationTypes * The type of event that causes the notification to be sent . For more information about notification types * supported by Amazon EC2 Auto Scaling , see < a > DescribeAutoScalingNotificationTypes < / a > . * @ return Returns a reference to this object so that method calls can be chained together . */ public PutNotificationConfigurationRequest withNotificationTypes ( String ... notificationTypes ) { } }
if ( this . notificationTypes == null ) { setNotificationTypes ( new com . amazonaws . internal . SdkInternalList < String > ( notificationTypes . length ) ) ; } for ( String ele : notificationTypes ) { this . notificationTypes . add ( ele ) ; } return this ;
public class BeanHelperCache { /** * Creates a BeanHelper and writes an interface containing its instance . Also , recursively creates * any BeanHelpers on its constrained properties . */ BeanHelper createHelper ( final JClassType pjtype , final TreeLogger plogger , final GeneratorContext pcontext ) throws UnableToCompleteException { } }
final JClassType erasedType = pjtype . getErasedType ( ) ; try { final Class < ? > clazz = Class . forName ( erasedType . getQualifiedBinaryName ( ) ) ; return doCreateHelper ( clazz , erasedType , plogger , pcontext ) ; } catch ( final ClassNotFoundException e ) { plogger . log ( TreeLogger . ERROR , "Unable to create BeanHelper for " + erasedType , e ) ; throw new UnableToCompleteException ( ) ; // NOPMD }
public class ManagedClustersInner { /** * Creates or updates a managed cluster . * Creates or updates a managed cluster with the specified configuration for agents and Kubernetes version . * @ param resourceGroupName The name of the resource group . * @ param resourceName The name of the managed cluster resource . * @ param parameters Parameters supplied to the Create or Update a Managed Cluster operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ManagedClusterInner object if successful . */ public ManagedClusterInner beginCreateOrUpdate ( String resourceGroupName , String resourceName , ManagedClusterInner parameters ) { } }
return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , resourceName , parameters ) . toBlocking ( ) . single ( ) . body ( ) ;
public class BooleanValueData { /** * { @ inheritDoc } */ protected boolean internalEquals ( ValueData another ) { } }
if ( another instanceof BooleanValueData ) { return ( ( BooleanValueData ) another ) . value == value ; } return false ;
public class ClassScanner { /** * 使用指定ClassLoader获取一个ClassScanner实例 * @ param classLoader 用于加载查找到的class的ClassLoader * @ return ClassScanner实例 */ public static ClassScanner getInstance ( ClassLoader classLoader ) { } }
ClassScanner scanner = classScannerMap . putIfAbsent ( classLoader , new ClassScanner ( ) ) ; if ( scanner == null ) { scanner = classScannerMap . get ( classLoader ) ; scanner . classLoader = classLoader ; } else { LOGGER . info ( "当前ClassLoader [{}] 对应的ClassScanner已存在,直接返回当前已存在的并且忽略设置ClassLoader" , classLoader ) ; } return scanner ;
public class XmlGraphMLWriter { /** * Creates an instance of the select { @ link GraphMLDecorator } . * @ param result The rule result . * @ return The { @ link GraphMLDecorator } . */ private GraphMLDecorator getGraphMLDecorator ( Result < ? > result ) { } }
String graphMLDecorator = result . getRule ( ) . getReport ( ) . getProperties ( ) . getProperty ( GRAPHML_DECORATOR ) ; Class < ? extends GraphMLDecorator > decoratorClass ; if ( graphMLDecorator != null ) { decoratorClass = classHelper . getType ( graphMLDecorator ) ; } else { decoratorClass = defaultDecoratorClass ; } return classHelper . createInstance ( decoratorClass ) ;
public class DeleteUserAttributesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteUserAttributesRequest deleteUserAttributesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteUserAttributesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteUserAttributesRequest . getUserAttributeNames ( ) , USERATTRIBUTENAMES_BINDING ) ; protocolMarshaller . marshall ( deleteUserAttributesRequest . getAccessToken ( ) , ACCESSTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class br_broker { /** * Use this operation to stop Unified Repeater Instance . */ public static br_broker stop ( nitro_service client , br_broker resource ) throws Exception { } }
return ( ( br_broker [ ] ) resource . perform_operation ( client , "stop" ) ) [ 0 ] ;
public class InstanceValidator {
    /**
     * The actual base entry point.
     * Validates a resource element against a StructureDefinition profile:
     * checks the XML namespace (when applicable), resolves or verifies the
     * profile against the element's resource type, then checks the id
     * presence rule and delegates to {@code start(...)} for the deep
     * validation. Note that {@code stack}, {@code resource}, {@code element},
     * {@code profile} and {@code idstatus} may all be reassigned along the
     * way - the statement order here is significant.
     */
    private void validateResource(List<ValidationMessage> errors, WrapperElement resource,
            WrapperElement element, StructureDefinition profile, IdStatus idstatus,
            NodeStack stack) throws FHIRException {
        // Defaults: a fresh stack for the element, and the element itself as the resource.
        if (stack == null)
            stack = new NodeStack(element.isXml());
        if (resource == null)
            resource = element;

        // getting going - either we got a profile, or not.
        boolean ok = true;
        if (element.isXml()) {
            // XML resources must live in the FHIR namespace.
            ok = rule(errors, IssueType.INVALID, element.line(), element.col(), "/",
                    element.getNamespace().equals(FormatUtilities.FHIR_NS),
                    "Namespace mismatch - expected '" + FormatUtilities.FHIR_NS + "', found '"
                            + element.getNamespace() + "'");
        }
        if (ok) {
            String resourceName = element.getResourceType();
            if (profile == null) {
                // No profile supplied: fall back to the base profile for the resource type.
                profile = context.fetchResource(StructureDefinition.class,
                        "http://hl7.org/fhir/StructureDefinition/" + resourceName);
                ok = rule(errors, IssueType.INVALID, element.line(), element.col(),
                        stack.addToLiteralPath(resourceName), profile != null,
                        "No profile found for resource type '" + resourceName + "'");
            } else {
                String type = profile.hasConstrainedType() ? profile.getConstrainedType()
                        : profile.getName();
                // special case: we have a bundle, and the profile is not for a bundle. We'll try the first entry instead
                if (!type.equals(resourceName) && resourceName.equals("Bundle")) {
                    WrapperElement first = getFirstEntry(element);
                    if (first != null && first.getResourceType().equals(type)) {
                        // Validate the bundle's first entry against the profile instead.
                        element = first;
                        resourceName = element.getResourceType();
                        idstatus = IdStatus.OPTIONAL; // why?
                    }
                }
                ok = rule(errors, IssueType.INVALID, -1, -1, stack.addToLiteralPath(resourceName),
                        type.equals(resourceName),
                        "Specified profile type was '" + profile.getConstrainedType()
                                + "', but resource type was '" + resourceName + "'");
            }
        }
        if (ok) {
            // Push the profile's root snapshot element (used as both definition and type).
            stack = stack.push(element, -1, profile.getSnapshot().getElement().get(0),
                    profile.getSnapshot().getElement().get(0));
            // Enforce the id presence rule for this validation context.
            if (idstatus == IdStatus.REQUIRED && (element.getNamedChild("id") == null))
                rule(errors, IssueType.INVALID, element.line(), element.col(),
                        stack.getLiteralPath(), false, "Resource requires an id, but none is present");
            else if (idstatus == IdStatus.PROHIBITED && (element.getNamedChild("id") != null))
                rule(errors, IssueType.INVALID, element.line(), element.col(),
                        stack.getLiteralPath(), false, "Resource has an id, but none is allowed");
            start(errors, resource, element, profile, stack); // root is both definition and type
        }
    }
}
public class ColumnCardinalityCache {
    /**
     * Gets the cardinality for each {@link AccumuloColumnConstraint}.
     * Given constraints are expected to be indexed! Who knows what would happen if they weren't!
     *
     * Work is fanned out to an executor (one task per constraint) and the
     * results are polled periodically; the method may return before all
     * tasks finish once a cardinality at or below {@code earlyReturnThreshold}
     * has been observed.
     *
     * @param schema Schema name
     * @param table Table name
     * @param auths Scan authorizations
     * @param idxConstraintRangePairs Mapping of all ranges for a given constraint
     * @param earlyReturnThreshold Smallest acceptable cardinality to return early while other tasks complete
     * @param pollingDuration Duration for polling the cardinality completion service
     * @return An immutable multimap of cardinality to column constraint, sorted by cardinality from smallest to largest
     * @throws TableNotFoundException If the metrics table does not exist
     * @throws ExecutionException If another error occurs; I really don't even know anymore.
     */
    public Multimap<Long, AccumuloColumnConstraint> getCardinalities(String schema, String table,
            Authorizations auths, Multimap<AccumuloColumnConstraint, Range> idxConstraintRangePairs,
            long earlyReturnThreshold, Duration pollingDuration) {
        // Submit tasks to the executor to fetch column cardinality, adding it to the Guava cache if necessary
        CompletionService<Pair<Long, AccumuloColumnConstraint>> executor =
                new ExecutorCompletionService<>(executorService);
        idxConstraintRangePairs.asMap().forEach((key, value) -> executor.submit(() -> {
            long cardinality = getColumnCardinality(schema, table, auths, key.getFamily(),
                    key.getQualifier(), value);
            LOG.debug("Cardinality for column %s is %s", key.getName(), cardinality);
            return Pair.of(cardinality, key);
        }));

        // Create a multi map sorted by cardinality
        // (treeKeys() keeps entries ordered by cardinality, smallest first).
        ListMultimap<Long, AccumuloColumnConstraint> cardinalityToConstraints =
                MultimapBuilder.treeKeys().arrayListValues().build();
        try {
            boolean earlyReturn = false;
            int numTasks = idxConstraintRangePairs.asMap().entrySet().size();
            do {
                // Sleep for the polling duration to allow concurrent tasks to run for this time
                Thread.sleep(pollingDuration.toMillis());

                // Poll each task, retrieving the result if it is done
                for (int i = 0; i < numTasks; ++i) {
                    Future<Pair<Long, AccumuloColumnConstraint>> futureCardinality = executor.poll();
                    if (futureCardinality != null && futureCardinality.isDone()) {
                        Pair<Long, AccumuloColumnConstraint> columnCardinality = futureCardinality.get();
                        cardinalityToConstraints.put(columnCardinality.getLeft(),
                                columnCardinality.getRight());
                    }
                }

                // If the smallest cardinality is present and below the threshold, set the earlyReturn flag
                Optional<Entry<Long, AccumuloColumnConstraint>> smallestCardinality =
                        cardinalityToConstraints.entries().stream().findFirst();
                if (smallestCardinality.isPresent()) {
                    if (smallestCardinality.get().getKey() <= earlyReturnThreshold) {
                        LOG.info("Cardinality %s, is below threshold. Returning early while other tasks finish",
                                smallestCardinality);
                        earlyReturn = true;
                    }
                }
            }
            // Keep polling until we can return early or every task has reported.
            while (!earlyReturn && cardinalityToConstraints.entries().size() < numTasks);
        }
        catch (ExecutionException | InterruptedException e) {
            // Restore the interrupt flag before wrapping; both cases surface as a PrestoException.
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            throw new PrestoException(UNEXPECTED_ACCUMULO_ERROR, "Exception when getting cardinality", e);
        }

        // Create a copy of the cardinalities
        return ImmutableMultimap.copyOf(cardinalityToConstraints);
    }
}
public class LogTemplates { /** * Produces a log template which logs something when stopwatch split is longer than threshold . * @ param delegateLogger Concrete log template * @ param threshold Threshold ( in milliseconds ) , above which logging is enabled * @ return Logger */ public static SplitThresholdLogTemplate whenSplitLongerThanMilliseconds ( LogTemplate < Split > delegateLogger , long threshold ) { } }
return whenSplitLongerThanNanoseconds ( delegateLogger , threshold * SimonClock . NANOS_IN_MILLIS ) ;
public class CommerceAccountPersistenceImpl { /** * Removes all the commerce accounts where userId = & # 63 ; and type = & # 63 ; from the database . * @ param userId the user ID * @ param type the type */ @ Override public void removeByU_T ( long userId , int type ) { } }
for ( CommerceAccount commerceAccount : findByU_T ( userId , type , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceAccount ) ; }
public class AbstractKeyedOutputHandler { /** * Reads query result and converts it into Map of Bean / Map * Values from column , index / name of which was specified via Constructor , would be used as key * @ param outputList Query output * @ return Map of Bean / Map * @ throws org . midao . jdbc . core . exception . MjdbcException */ public Map < K , V > handle ( List < QueryParameters > outputList ) throws MjdbcException { } }
QueryParameters params = null ; Map < K , V > result = new HashMap < K , V > ( ) ; for ( int i = 1 ; i < outputList . size ( ) ; i ++ ) { params = outputList . get ( i ) ; result . put ( this . createKey ( params ) , ( V ) this . createRow ( params ) ) ; } return result ;
public class ParserDQL {
    /**
     * &lt;boolean predicand&gt; ::= this | &lt;parenthesized boolean value expression&gt;
     *
     * Reads a simple value expression primary from the token stream: an
     * unsigned value specification, a parenthesized scalar/row subquery,
     * an asterisk column, one of the CASE/CAST/COALESCE families, a
     * datetime/interval literal, an aggregate, a sequence expression, or -
     * as the fall-through - a column or function expression. Returns null
     * when a parenthesized expression turns out not to be a subquery (the
     * token position is rewound in that case).
     */
    Expression XreadSimpleValueExpressionPrimary() {

        Expression e;

        // Literals and parameter markers are handled first.
        e = XreadUnsignedValueSpecificationOrNull();

        if (e != null) {
            return e;
        }

        switch (token.tokenType) {

            case Tokens.OPENBRACKET :
                int position = getPosition();

                read();

                int subqueryPosition = getPosition();

                readOpenBrackets();

                switch (token.tokenType) {

                    case Tokens.TABLE :
                    case Tokens.VALUES :
                    case Tokens.SELECT :
                        SubQuery sq = null;

                        rewind(subqueryPosition);

                        try {
                            sq = XreadSubqueryBody(false, OpTypes.SCALAR_SUBQUERY);

                            readThis(Tokens.CLOSEBRACKET);
                        } catch (HsqlException ex) {
                            // Not parsable as a subquery: remember the deepest error
                            // and rewind so the caller can try other productions.
                            compileContext.resetSubQueryLevel();
                            ex.setLevel(compileContext.subQueryDepth);

                            if (lastError == null
                                    || lastError.getLevel() < ex.getLevel()) {
                                lastError = ex;
                            }

                            rewind(position);

                            return null;
                        }

                        // A VoltDB extension to adapt to the hsqldb 2.3.2 change for fuller subquery support.
                        SubQuery td = sq;
                        // End of VoltDB extension
                        // BEGIN Cherry-picked code change from hsqldb-2.3.2
                        if (td.queryExpression.isSingleColumn()) {
                            e = new Expression(OpTypes.SCALAR_SUBQUERY, td);
                        } else {
                            e = new Expression(OpTypes.ROW_SUBQUERY, td);
                        }

                        return e;
                        /* Disable 5 lines ...
                        if (!sq.queryExpression.isSingleColumn()) {
                            throw Error.error(ErrorCode.W_01000);
                        }
                        return new Expression(OpTypes.SCALAR_SUBQUERY, sq);
                        ... disabled 5 lines. */
                        // END Cherry-picked code change from hsqldb-2.3.2
                    default :
                        rewind(position);

                        return null;
                }
            case Tokens.ASTERISK :
                e = new ExpressionColumn(token.namePrePrefix, token.namePrefix);

                recordExpressionForToken((ExpressionColumn) e);
                read();

                return e;

            case Tokens.CASEWHEN :
                return readCaseWhenExpression();

            case Tokens.CASE :
                return readCaseExpression();

            case Tokens.NULLIF :
                return readNullIfExpression();

            case Tokens.COALESCE :
            case Tokens.IFNULL :
                return readCoalesceExpression();

            case Tokens.CAST :
            case Tokens.CONVERT :
                return readCastExpression();

            case Tokens.DATE :
            case Tokens.TIME :
            case Tokens.TIMESTAMP :
            case Tokens.INTERVAL :
                e = readDateTimeIntervalLiteral();

                // Falls through to column/function parsing when these keywords
                // do not start a datetime/interval literal.
                if (e != null) {
                    return e;
                }
                break;

            case Tokens.ANY :
            case Tokens.SOME :
            case Tokens.EVERY :
            case Tokens.COUNT :
            case Tokens.APPROX_COUNT_DISTINCT :
            case Tokens.MAX :
            case Tokens.MIN :
            case Tokens.SUM :
            case Tokens.AVG :
            case Tokens.STDDEV_POP :
            case Tokens.STDDEV_SAMP :
            case Tokens.VAR_POP :
            case Tokens.VAR_SAMP :
            case Tokens.RANK :
            case Tokens.DENSE_RANK :
            case Tokens.ROW_NUMBER :
                return readAggregate();

            case Tokens.NEXT :
                return readSequenceExpression();

            case Tokens.LEFT :
            case Tokens.RIGHT :

                // CLI function names
                break;

            default :
                if (isCoreReservedKey()) {
                    throw unexpectedToken();
                }
        }

        return readColumnOrFunctionExpression();
    }
}
public class Category { /** * Check whether this category is enabled for the { @ code DEBUG } Level . * This function is intended to lessen the computational cost of disabled log debug statements . * For some { @ code cat } Category object , when you write , * < pre > * cat . debug ( " This is entry number : " + i ) ; * < / pre > * You incur the cost constructing the message , concatenatiion in this case , regardless of whether the message is * logged or not . * If you are worried about speed , then you should write * < pre > * if ( cat . isDebugEnabled ( ) ) { * cat . debug ( " This is entry number : " + i ) ; * < / pre > * This way you will not incur the cost of parameter construction if debugging is disabled for { @ code cat } . On the * other hand , if the { @ code cat } is debug enabled , you will incur the cost of evaluating whether the category is * debug enabled twice . Once in { @ code isDebugEnabled } and once in the { @ code debug } . This is an insignificant * overhead since evaluating a category takes about 1 % % of the time it takes to actually log . * @ return boolean - { @ code true } if this category is debug enabled , { @ code false } otherwise . */ public boolean isDebugEnabled ( ) { } }
return MINIMUM_LEVEL_COVERS_DEBUG && provider . isEnabled ( STACKTRACE_DEPTH , null , org . tinylog . Level . DEBUG ) ;
public class TreeReaderRegistry { /** * - - - FACTORY FINDER METHOD - - - */ private static final TreeReader getReader ( String format , boolean throwException ) { } }
TreeReader reader ; if ( format == null ) { reader = cachedJsonReader ; } else { String key = format . toLowerCase ( ) ; if ( JSON . equals ( key ) ) { reader = cachedJsonReader ; } else { reader = readers . get ( key ) ; } } if ( reader != null ) { return reader ; } // Search reader class by system property : // - Ddatatree . json . reader = io . datatree . adapters . JsonBoon // - Ddatatree . xml . reader = io . datatree . adapters . XmlJackson // - Ddatatree . yaml . reader = your . yaml . reader . ClassName // - Ddatatree . csv . reader = your . csv . reader . ClassName // - Ddatatree . properties . reader = your . properties . reader . ClassName // . . . or to use your " custom " format : // - Ddatatree . custom . reader = your . custom . reader . ClassName // Tree node = new Tree ( source , " custom " ) ; String propertyName = "datatree." + format + ".reader" ; try { String className = System . getProperty ( propertyName ) ; if ( className == null || className . isEmpty ( ) ) { className = PackageScanner . findByFormat ( format , true ) ; } if ( className == null || className . isEmpty ( ) ) { throw new IllegalArgumentException ( "System Property \"" + propertyName + "\" not found! " + "This property defines a custom reader class for the \"" + format + "\" format." ) ; } reader = ( TreeReader ) Class . forName ( className ) . newInstance ( ) ; readers . put ( format , reader ) ; return reader ; } catch ( Throwable cause ) { if ( throwException ) { suggestDependency ( format ) ; cause . printStackTrace ( ) ; throw new IllegalArgumentException ( "Unable to create reader for format \"" + format + "\"! Set the -D" + propertyName + "=package.ReaderClass initial parameter to specify the proper reader class." , cause ) ; } } return new JsonBuiltin ( ) ;
public class Message { /** * Sets the message payload data . * @ param data the payload data */ public void setData ( byte [ ] data ) { } }
if ( immutable ) { throw new IllegalStateException ( ERR_MSG_IMMUTABLE ) ; } if ( data == null ) { this . data = null ; } else { setData ( data , 0 , data . length ) ; }
public class RunnerManager { /** * Retrieves the most adequate { @ link JobRunner } for a given { @ link JobInstance } . Throws { @ link JqmRuntimeException } if none was found . * @ param ji * @ return */ JobRunner getRunner ( JobInstance ji ) { } }
if ( runnerCache . containsKey ( ji . getJdId ( ) ) ) { return runnerCache . get ( ji . getJdId ( ) ) ; } for ( JobRunner runner : runners ) { if ( runner . canRun ( ji ) ) { runnerCache . put ( ji . getJdId ( ) , runner ) ; return runner ; } } throw new JqmRuntimeException ( "there is no runner able to run job definition " + ji . getJD ( ) . getApplicationName ( ) ) ;
public class ServletUtils { /** * Returns a mutex object for the given { @ link HttpSession } that can be used * as a lock for a given session . For example , to synchronize lazy * initialization of session scoped objects . * < p > The semantics for locking on an HttpSession object are unspecified , and * servlet containers are free to implement the HttpSession in such a way * that acquiring a lock on the HttpSession itself is not safe . When used * in conjunction with a HttpSessionListener ( such as NetUI ' s * HttpSessionMutexListener ) that puts a mutex object on the session when * the session is created , this method provides a lock that is 100 % safe * to use across servlet containers . If a HttpSessionListener is not * registered in web . xml and there is no object for the given attribute name , * the HttpSession itself is returned as the next best lock . < / p > * @ param httpSession the current session * @ param attributeName the attribute name of the mutex object on the session * @ return a mutex that can be used to serialize operations on the HttpSession */ public static Object getSessionMutex ( HttpSession httpSession , String attributeName ) { } }
assert httpSession != null : "HttpSession must not be null" ; assert attributeName != null : "The attribute name must not be null" ; Object mutex = httpSession . getAttribute ( attributeName ) ; if ( mutex == null ) mutex = httpSession ; assert mutex != null ; if ( LOG . isDebugEnabled ( ) ) LOG . debug ( "Using session lock of type: " + mutex . getClass ( ) ) ; return mutex ;
public class Util {
    /**
     * Validate that a method returns no value; if it does not, an
     * explanatory {@link Exception} is appended to {@code errors}.
     *
     * @param method the method to be tested
     * @param errors a list to place the errors
     */
    @SuppressWarnings({"ThrowableInstanceNeverThrown"})
    public static void validateVoid(Method method, List<Throwable> errors) {
        boolean returnsVoid = method.getReturnType() == Void.TYPE;
        if (!returnsVoid) {
            errors.add(new Exception("Method " + method.getName() + "() should be void"));
        }
    }
}
public class ThreadLocalRandom { /** * Returns a pseudorandom , uniformly distributed value between the * given least value ( inclusive ) and bound ( exclusive ) . * @ param least the least value returned * @ param bound the upper bound ( exclusive ) * @ return the next value * @ throws IllegalArgumentException if least greater than or equal * to bound */ public double nextDouble ( double least , double bound ) { } }
if ( least >= bound ) { throw new IllegalArgumentException ( ) ; } return nextDouble ( ) * ( bound - least ) + least ;
public class COP {
  /**
   * Process a single relation: computes a Correlation Outlier Probability for
   * every object by projecting each point into the PCA basis of its k nearest
   * neighbors and scoring the accumulated squared deviations against either a
   * chi-squared or a fitted Gamma distribution.
   *
   * @param relation Relation to process
   * @return Outlier detection result (scores in [0,1]; optionally error-vector
   *         and local-dimensionality child relations when {@code models} is set)
   */
  public OutlierResult run(Relation<V> relation) {
    final DBIDs ids = relation.getDBIDs();
    // k+1 neighbors are requested because the query point itself is included
    // in its own kNN result and is removed again below.
    KNNQuery<V> knnQuery = QueryUtil.getKNNQuery(relation, getDistanceFunction(), k + 1);
    final int dim = RelationUtil.dimensionality(relation);
    if (k <= dim + 1) {
      LOG.warning("PCA is underspecified with a too low k! k should be at much larger than " + dim);
    }
    // Per-object output storages; error vectors and dimensionality only when models are requested.
    WritableDoubleDataStore cop_score = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC);
    WritableDataStore<double[]> cop_err_v = models ? DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC, double[].class) : null;
    WritableIntegerDataStore cop_dim = models ? DataStoreUtil.makeIntegerStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC, -1) : null;
    // compute neighbors of each db object
    FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Correlation Outlier Probabilities", relation.size(), LOG) : null;
    // Scratch buffers reused across objects to avoid per-iteration allocation.
    double[] centroid = new double[dim];
    double[] scores = new double[dim];
    ModifiableDBIDs nids = DBIDUtil.newHashSet(k + 10);
    for (DBIDIter id = ids.iter(); id.valid(); id.advance()) {
      nids.clear();
      nids.addDBIDs(knnQuery.getKNNForDBID(id, k + 1));
      nids.remove(id); // Do not use query object
      // Local PCA on the neighborhood, centered at the neighborhood centroid.
      computeCentroid(centroid, relation, nids);
      PCAResult pcares = pca.processIds(nids, relation);
      double[][] tevecs = pcares.getEigenvectors();
      double[] evs = pcares.getEigenvalues();
      // Project the (centered) query point into the eigenvector basis.
      double[] projected = times(tevecs, minusEquals(relation.get(id).toArray(), centroid));
      if (dist == DistanceDist.CHISQUARED) {
        double sqdevs = 0;
        for (int d = 0; d < dim; d++) {
          double dev = projected[d];
          // Scale with variance and accumulate
          sqdevs += dev * dev / evs[d];
          // Tail probability of the cumulative deviation in the first d+1 dimensions.
          scores[d] = 1 - ChiSquaredDistribution.cdf(sqdevs, d + 1);
        }
      } else {
        assert (dist == DistanceDist.GAMMA);
        // Gamma variant: fit a Gamma distribution to the neighbors' own
        // cumulative deviations in each dimensionality.
        double[][] dists = new double[dim][nids.size()];
        int j = 0;
        double[] srel = new double[dim];
        for (DBIDIter s = nids.iter(); s.valid() && j < nids.size(); s.advance(), j++) {
          V vec = relation.get(s);
          for (int d = 0; d < dim; d++) {
            srel[d] = vec.doubleValue(d) - centroid[d];
          }
          double sqdist = 0.0;
          for (int d = 0; d < dim; d++) {
            double serrd = transposeTimes(tevecs[d], srel);
            // Cumulative (running) variance-scaled squared deviation per dimension.
            dists[d][j] = (sqdist += serrd * serrd / evs[d]);
          }
        }
        double sqdevs = 0;
        for (int d = 0; d < dim; d++) {
          // Scale with Stddev
          final double dev = projected[d];
          // Accumulate
          sqdevs += dev * dev / evs[d];
          // Sort, so we can trim the top 15% below.
          Arrays.sort(dists[d]);
          // Evaluate
          scores[d] = 1 - GammaChoiWetteEstimator.STATIC.estimate(dists[d], SHORTENED_ARRAY).cdf(sqdevs);
        }
      }
      // Find best score
      double min = Double.POSITIVE_INFINITY;
      int vdim = dim - 1;
      for (int d = 0; d < dim; d++) {
        double v = scores[d];
        if (v < min) {
          min = v;
          vdim = d;
        }
      }
      // Normalize the value
      final double prob = expect * (1 - min) / (expect + min);
      cop_score.putDouble(id, prob);
      if (models) {
        // Construct the error vector: zero out the "normal" dimensions and map
        // the remainder back into the original data space, scaled by -prob.
        Arrays.fill(projected, vdim + 1, dim, 0.);
        cop_err_v.put(id, timesEquals(transposeTimes(tevecs, projected), -prob));
        cop_dim.putInt(id, dim - vdim);
      }
      LOG.incrementProcessed(prog);
    }
    LOG.ensureCompleted(prog);
    // combine results.
    DoubleRelation scoreResult = new MaterializedDoubleRelation("Correlation Outlier Probabilities", COP_SCORES, cop_score, ids);
    OutlierScoreMeta scoreMeta = new ProbabilisticOutlierScore();
    OutlierResult result = new OutlierResult(scoreMeta, scoreResult);
    if (models) {
      result.addChildResult(new MaterializedRelation<>("Local Dimensionality", COP_DIM, TypeUtil.INTEGER, cop_dim, ids));
      result.addChildResult(new MaterializedRelation<>("Error vectors", COP_ERRORVEC, TypeUtil.DOUBLE_ARRAY, cop_err_v, ids));
    }
    return result;
  }
}
public class JFrmMainFrame {
    /**
     * GEN-END:initComponents
     *
     * Tree selection handler: when the selected node is a
     * {@code PropertySheetModel}, looks up (or reflectively creates) the
     * matching {@code PropertySheetView}, binds the model to it, and shows it
     * in {@code jScrollPane2}.
     */
    private void jTree1ValueChanged(javax.swing.event.TreeSelectionEvent evt) // GEN-FIRST:event_jTree1ValueChanged
    {
        // GEN-HEADEREND:event_jTree1ValueChanged
        javax.swing.tree.TreePath tp = evt.getPath();
        if (tp != null) {
            Object o = tp.getLastPathComponent();
            if (o instanceof PropertySheetModel) {
                PropertySheetModel p = (PropertySheetModel) o;
                // Look up a cached view keyed by the model's view class.
                PropertySheetView pv = (PropertySheetView) hmPropertySheets.get(p.getPropertySheetClass());
                if (pv == null) {
                    try {
                        // Reflectively instantiate the view. NOTE(review): the new
                        // instance is not put back into hmPropertySheets, so a view
                        // is re-created on every selection — confirm whether caching
                        // was intended.
                        pv = (PropertySheetView) p.getPropertySheetClass().newInstance();
                    } catch (InstantiationException ie) {
                        // What to do here????? NOTE(review): if instantiation fails,
                        // pv stays null and pv.setModel(p) below will throw NPE.
                        ie.printStackTrace();
                    } catch (IllegalAccessException iae) {
                        iae.printStackTrace();
                    }
                }
                pv.setModel(p);
                this.jScrollPane2.setViewportView((java.awt.Component) pv);
            }
        }
    }
}
public class ApiOvhDedicatedserver { /** * Add a new email alert * REST : POST / dedicated / server / { serviceName } / serviceMonitoring / { monitoringId } / alert / email * @ param language [ required ] Alert language * @ param email [ required ] Alert destination * @ param serviceName [ required ] The internal name of your dedicated server * @ param monitoringId [ required ] This monitoring id */ public OvhEmailAlert serviceName_serviceMonitoring_monitoringId_alert_email_POST ( String serviceName , Long monitoringId , String email , OvhAlertLanguageEnum language ) throws IOException { } }
String qPath = "/dedicated/server/{serviceName}/serviceMonitoring/{monitoringId}/alert/email" ; StringBuilder sb = path ( qPath , serviceName , monitoringId ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "email" , email ) ; addBody ( o , "language" , language ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhEmailAlert . class ) ;
public class Keyed { /** * Remove this Keyed object , and all subparts . */ public final Futures remove ( Futures fs ) { } }
if ( _key != null ) DKV . remove ( _key , fs ) ; return remove_impl ( fs ) ;
public class TagletWriter { /** * Given an inline tag , return its output . * @ param holder * @ param tagletManager The taglet manager for the current doclet . * @ param holderTag The tag this holds this inline tag . Null if there * is no tag that holds it . * @ param inlineTag The inline tag to be documented . * @ param tagletWriter The taglet writer to write the output . * @ return The output of the inline tag . */ public static Content getInlineTagOutput ( Element holder , TagletManager tagletManager , DocTree holderTag , DocTree inlineTag , TagletWriter tagletWriter ) { } }
List < Taglet > definedTags = tagletManager . getInlineCustomTaglets ( ) ; CommentHelper ch = tagletWriter . configuration ( ) . utils . getCommentHelper ( holder ) ; final String inlineTagName = ch . getTagName ( inlineTag ) ; // This is a custom inline tag . for ( Taglet definedTag : definedTags ) { if ( ( definedTag . getName ( ) ) . equals ( inlineTagName ) ) { // Given a name of a seen custom tag , remove it from the // set of unseen custom tags . tagletManager . seenCustomTag ( definedTag . getName ( ) ) ; Content output = definedTag . getTagletOutput ( holder , holderTag != null && definedTag . getName ( ) . equals ( "inheritDoc" ) ? holderTag : inlineTag , tagletWriter ) ; return output ; } } return null ;
public class ListManagementTermListsImpl {
    /**
     * Gets all the Term Lists.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;TermList&gt; object
     */
    public Observable<ServiceResponse<List<TermList>>> getAllTermListsWithServiceResponseAsync() {
        // The base URL is mandatory: it is substituted into the parameterized host.
        if (this.client.baseUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.baseUrl() is required and cannot be null.");
        }
        // Build the "{baseUrl}" -> actual-host substitution pair used by Retrofit's host templating.
        String parameterizedHost = Joiner.on(", ").join("{baseUrl}", this.client.baseUrl());
        // Issue the call and map the raw HTTP response into a typed ServiceResponse,
        // turning any deserialization/validation failure into an error emission.
        return service.getAllTermLists(this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<TermList>>>>() {
                @Override
                public Observable<ServiceResponse<List<TermList>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<List<TermList>> clientResponse = getAllTermListsDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class IEEE754rUtil {
    /**
     * Gets the minimum of two {@code float} values.
     * Per IEEE-754r semantics, NaN is returned only when both arguments are NaN;
     * if exactly one argument is NaN, the other argument is returned.
     *
     * @param a value 1
     * @param b value 2
     * @return the smallest of the values
     */
    public static float min(final float a, final float b) {
        // A NaN operand is ignored in favor of the other value; when neither
        // is NaN, defer to Math.min.
        return Float.isNaN(a) ? b
             : Float.isNaN(b) ? a
             : Math.min(a, b);
    }
}
public class ZipkinManager { /** * Retrieves a newly generated random long . * @ return A newly generated random long */ public static long getRandomLong ( ) { } }
byte [ ] rndBytes = new byte [ 8 ] ; SECURE_RANDOM_TL . get ( ) . nextBytes ( rndBytes ) ; return ByteBuffer . wrap ( rndBytes ) . getLong ( ) ;
public class EitherT {
    /**
     * {@inheritDoc}
     */
    @Override
    public <R2> EitherT<M, L, R2> pure(R2 r2) {
        // Wrap the value as a Right, lift it into the underlying monad via
        // melr.pure, and re-wrap the result as an EitherT.
        return eitherT(melr.pure(right(r2)));
    }
}
public class FontFactoryImp { /** * Constructs a < CODE > Font < / CODE > - object . * @ paramfontname the name of the font * @ paramencoding the encoding of the font * @ param embedded true if the font is to be embedded in the PDF * @ paramsize the size of this font * @ paramstyle the style of this font * @ paramcolor the < CODE > Color < / CODE > of this font . * @ return the Font constructed based on the parameters */ public Font getFont ( String fontname , String encoding , boolean embedded , float size , int style , Color color ) { } }
return getFont ( fontname , encoding , embedded , size , style , color , true ) ;
public class CreateDataSourceFromRDSRequestMarshaller {
    /**
     * Marshall the given parameter object: writes each request field to the
     * protocol marshaller under its predefined JSON binding.
     */
    public void marshall(CreateDataSourceFromRDSRequest createDataSourceFromRDSRequest, ProtocolMarshaller protocolMarshaller) {
        if (createDataSourceFromRDSRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per request field, each paired with its binding descriptor.
            protocolMarshaller.marshall(createDataSourceFromRDSRequest.getDataSourceId(), DATASOURCEID_BINDING);
            protocolMarshaller.marshall(createDataSourceFromRDSRequest.getDataSourceName(), DATASOURCENAME_BINDING);
            protocolMarshaller.marshall(createDataSourceFromRDSRequest.getRDSData(), RDSDATA_BINDING);
            protocolMarshaller.marshall(createDataSourceFromRDSRequest.getRoleARN(), ROLEARN_BINDING);
            protocolMarshaller.marshall(createDataSourceFromRDSRequest.getComputeStatistics(), COMPUTESTATISTICS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PackageWriterImpl { /** * { @ inheritDoc } */ @ Override public void addPackageDescription ( Content packageContentTree ) { } }
if ( ! utils . getBody ( packageElement ) . isEmpty ( ) ) { Content tree = configuration . allowTag ( HtmlTag . SECTION ) ? sectionTree : packageContentTree ; addDeprecationInfo ( tree ) ; addInlineComment ( packageElement , tree ) ; }
public class DescribeAutoScalingGroupsRequest { /** * The names of the Auto Scaling groups . Each name can be a maximum of 1600 characters . By default , you can only * specify up to 50 names . You can optionally increase this limit using the < code > MaxRecords < / code > parameter . * If you omit this parameter , all Auto Scaling groups are described . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setAutoScalingGroupNames ( java . util . Collection ) } or * { @ link # withAutoScalingGroupNames ( java . util . Collection ) } if you want to override the existing values . * @ param autoScalingGroupNames * The names of the Auto Scaling groups . Each name can be a maximum of 1600 characters . By default , you can * only specify up to 50 names . You can optionally increase this limit using the < code > MaxRecords < / code > * parameter . < / p > * If you omit this parameter , all Auto Scaling groups are described . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeAutoScalingGroupsRequest withAutoScalingGroupNames ( String ... autoScalingGroupNames ) { } }
if ( this . autoScalingGroupNames == null ) { setAutoScalingGroupNames ( new com . amazonaws . internal . SdkInternalList < String > ( autoScalingGroupNames . length ) ) ; } for ( String ele : autoScalingGroupNames ) { this . autoScalingGroupNames . add ( ele ) ; } return this ;
public class AtomicInitializer { /** * Returns the object managed by this initializer . The object is created if * it is not available yet and stored internally . This method always returns * the same object . * @ return the object created by this { @ code AtomicInitializer } * @ throws ConcurrentException if an error occurred during initialization of * the object */ @ Override public T get ( ) throws ConcurrentException { } }
T result = reference . get ( ) ; if ( result == null ) { result = initialize ( ) ; if ( ! reference . compareAndSet ( null , result ) ) { // another thread has initialized the reference result = reference . get ( ) ; } } return result ;
public class RepositoryQueryManager { /** * Obtain the query engine , which is created lazily and in a thread - safe manner . * @ return the query engine ; never null */ protected final QueryEngine queryEngine ( ) { } }
if ( queryEngine == null ) { try { engineInitLock . lock ( ) ; if ( queryEngine == null ) { QueryEngineBuilder builder = null ; if ( ! repoConfig . getIndexProviders ( ) . isEmpty ( ) ) { // There is at least one index provider . . . builder = IndexQueryEngine . builder ( ) ; logger . debug ( "Queries with indexes are enabled for the '{0}' repository. Executing queries may require scanning the repository contents when the query cannot use the defined indexes." , repoConfig . getName ( ) ) ; } else { // There are no indexes . . . builder = ScanningQueryEngine . builder ( ) ; logger . debug ( "Queries with no indexes are enabled for the '{0}' repository. Executing queries will always scan the repository contents." , repoConfig . getName ( ) ) ; } queryEngine = builder . using ( repoConfig , indexManager , runningState . context ( ) ) . build ( ) ; } } finally { engineInitLock . unlock ( ) ; } } return queryEngine ;
public class ExtendedIdentifiers { /** * Creates an Extended Identifier . * @ param namespaceUri * URI of the namespace of the inner XML of the Extended Identifier * @ param namespacePrefix * prefix of the namespace of the inner XML of the Extended Identifier * @ param identifierName * the name value of the Extended Identifier ( will be the root node of the inner XML ) * @ param attributeValue * a value for the attribute < i > name < / i > * @ param administrativeDomain * the value of the administrativeDomain attribute * @ return an { @ link Identity } instance encapsulating the Extended Identifier */ public static Identity createExtendedIdentifier ( String namespaceUri , String namespacePrefix , String identifierName , String attributeValue , String administrativeDomain ) { } }
Document doc = mDocumentBuilder . newDocument ( ) ; Element e = doc . createElementNS ( namespaceUri , namespacePrefix + ":" + identifierName ) ; if ( attributeValue != null ) { e . setAttribute ( "name" , attributeValue ) ; } e . setAttribute ( "administrative-domain" , administrativeDomain ) ; doc . appendChild ( e ) ; try { return Identifiers . createExtendedIdentity ( doc ) ; } catch ( MarshalException e1 ) { IfmapJLog . error ( "document that contains the extended identifier can't be handled" ) ; throw new RuntimeException ( e1 ) ; }
public class SloppyMath {
    /**
     * max() that works on three integers.
     *
     * @return The maximum of three int values.
     */
    public static int max(int a, int b, int c) {
        // Two pairwise comparisons; for ints there are no NaN/-0.0 concerns.
        return Math.max(a, Math.max(b, c));
    }
}
public class Main {
    /**
     * Sets every odd bit of the number, counting bits from 1 starting at the
     * least significant bit — i.e. bit positions 0, 2, 4, ... — but only up to
     * the number's own bit length. Non-positive inputs are returned unchanged.
     *
     * Example Usage:
     *   setOddBits(10) -> 15
     *   setOddBits(20) -> 21
     *   setOddBits(30) -> 31
     *
     * @param num Integer input for which all odd bits are to be set.
     * @return Modified number with all odd bits set.
     */
    public static int setOddBits(int num) {
        // Width = number of significant bits; 0 for num <= 0, which matches the
        // original loop that never ran for non-positive input.
        final int width = (num <= 0) ? 0 : 32 - Integer.numberOfLeadingZeros(num);
        int mask = 0;
        for (int pos = 0; pos < width; pos += 2) {
            mask |= 1 << pos;
        }
        return num | mask;
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link BigInteger}{@code >}
     * for the {@code maxItems} element scoped to {@link GetRenditions}.
     */
    @XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "maxItems", scope = GetRenditions.class)
    public JAXBElement<BigInteger> createGetRenditionsMaxItems(BigInteger value) {
        // NOTE(review): reuses _GetTypeChildrenMaxItems_QNAME — presumably the
        // generator shares one QName constant because both elements are named
        // "maxItems" in the same namespace; confirm against the schema.
        return new JAXBElement<BigInteger>(_GetTypeChildrenMaxItems_QNAME, BigInteger.class, GetRenditions.class, value);
    }
}
public class ImgUtil {
    /**
     * Cuts an image: crops the region described by the given rectangle
     * (x, y, width, height) from the source image file and writes the result
     * to the destination file.
     *
     * @param srcImgFile the source image file
     * @param destImgFile the destination file for the cropped image
     * @param rectangle the rectangle describing the region to cut (x, y, width, height)
     * @since 3.1.0
     */
    public static void cut(File srcImgFile, File destImgFile, Rectangle rectangle) {
        // Load the source file and delegate to the image-based overload.
        cut(read(srcImgFile), destImgFile, rectangle);
    }
}
public class DatastreamFilenameHelper { /** * Get a filename extension for a datastream based on mime - type to extension mapping . * mappingType may be : * < li > never : never look up extension * < li > ifmissing : if the given filename already contains an extension return nothing , * otherwise look up an extension * < li > always : always look up an extension */ private static final String getExtension ( String filename , String mappingType , String MIMETYPE ) throws Exception { } }
String extension = "" ; if ( mappingType . equals ( "never" ) ) { extension = "" ; } else { // if mapping specifies ifmissing and filename contains an extension ; extension is " " ( filename already contains the extension ) if ( mappingType . equals ( "ifmissing" ) && filename . contains ( "." ) ) { extension = "" ; } else { // oth if ( mappingType . equals ( "ifmissing" ) || mappingType . equals ( "always" ) ) { // look up extension from mapping extension = getExtension ( MIMETYPE ) ; // if not found in mappings , use the default if ( extension . isEmpty ( ) ) extension = m_datastreamDefaultExtension ; } else { // unknown mapping type logger . warn ( "Unknown extension mapping type specified in fedora.fcfg" ) ; extension = m_datastreamDefaultExtension ; } } } return extension ;
public class CodedInput {
    /**
     * Reads and discards {@code size} bytes.
     *
     * @throws ProtobufException The end of the stream or the current
     *         limit was reached.
     */
    public void skipRawBytes(final int size) throws IOException {
        if (size < 0) {
            throw ProtobufException.negativeSize();
        }
        // Would skipping cross the active message limit? Consume what remains
        // up to the limit (keeping stream position consistent), then fail.
        if (totalBytesRetired + bufferPos + size > currentLimit) {
            // Read to the end of the stream anyway.
            skipRawBytes(currentLimit - totalBytesRetired - bufferPos);
            // Then fail.
            throw ProtobufException.truncatedMessage();
        }
        if (size <= bufferSize - bufferPos) {
            // We have all the bytes we need already.
            bufferPos += size;
        } else {
            // Skipping more bytes than are in the buffer. First skip what we have.
            int pos = bufferSize - bufferPos;
            bufferPos = bufferSize;
            // Keep refilling the buffer until we get to the point we wanted to skip
            // to. This has the side effect of ensuring the limits are updated
            // correctly.
            refillBuffer(true);
            while (size - pos > bufferSize) {
                pos += bufferSize;
                bufferPos = bufferSize;
                refillBuffer(true);
            }
            // Land inside the final buffer at the exact remaining offset.
            bufferPos = size - pos;
        }
    }
}
public class ShardMarshaller {
    /**
     * Marshall the given parameter object: writes each Shard field to the
     * protocol marshaller under its predefined binding.
     */
    public void marshall(Shard shard, ProtocolMarshaller protocolMarshaller) {
        if (shard == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // One marshall call per field, paired with its binding descriptor.
            protocolMarshaller.marshall(shard.getShardId(), SHARDID_BINDING);
            protocolMarshaller.marshall(shard.getSequenceNumberRange(), SEQUENCENUMBERRANGE_BINDING);
            protocolMarshaller.marshall(shard.getParentShardId(), PARENTSHARDID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BsDataConfig {
    /**
     * Builds a "{col, col, ...}"-style column dump of this entity's fields,
     * using {@code dm} as the delimiter. The leading delimiter is stripped
     * before the braces are added.
     */
    @Override
    protected String doBuildColumnString(String dm) {
        StringBuilder sb = new StringBuilder();
        // Append each column value, prefixed by the delimiter.
        sb.append(dm).append(available);
        sb.append(dm).append(boost);
        sb.append(dm).append(createdBy);
        sb.append(dm).append(createdTime);
        sb.append(dm).append(description);
        sb.append(dm).append(handlerName);
        sb.append(dm).append(handlerParameter);
        sb.append(dm).append(handlerScript);
        sb.append(dm).append(name);
        sb.append(dm).append(permissions);
        sb.append(dm).append(sortOrder);
        sb.append(dm).append(updatedBy);
        sb.append(dm).append(updatedTime);
        sb.append(dm).append(virtualHosts);
        // Drop the leading delimiter (guarded in case nothing was appended).
        if (sb.length() > dm.length()) {
            sb.delete(0, dm.length());
        }
        sb.insert(0, "{").append("}");
        return sb.toString();
    }
}
public class AnyValueMap { /** * Converts map element into an AnyValue or returns an empty AnyValue if * conversion is not possible . * @ param key a key of element to get . * @ return AnyValue value of the element or empty AnyValue if conversion is not * supported . * @ see AnyValue * @ see AnyValue # AnyValue ( Object ) */ public AnyValue getAsValue ( String key ) { } }
Object value = getAsObject ( key ) ; return new AnyValue ( value ) ;
public class GithubPagesPublisher { private void gitCommand ( Path temporaryDir , String ... command ) throws Exception { } }
// remove sensitive credentials from command before printing to console String displayedCommand = String . join ( " " , command ) . replaceAll ( getRemoteUrl ( ) , getDisplayedRemoteUrl ( ) ) ; Clog . d ( "Github Pages GIT: {}" , displayedCommand ) ; // but pass directly to system shell to execute execGitCommand ( temporaryDir , command ) ;
public class AbstractGenericHandler { /** * Helper method to complete the request span , called from child instances . * @ param request the corresponding request . */ protected void completeRequestSpan ( final CouchbaseRequest request ) { } }
if ( request != null && request . span ( ) != null ) { if ( env ( ) . operationTracingEnabled ( ) ) { env ( ) . tracer ( ) . scopeManager ( ) . activate ( request . span ( ) , true ) . close ( ) ; } }
public class StringUtil { /** * / * - - - - - [ Literal ] - - - - - */ public static String toLiteral ( char ch , boolean useRaw ) { } }
if ( ch == '\'' ) return "\\'" ; else if ( ch == '"' ) return "\"" ; else return StringUtil . toLiteral ( String . valueOf ( ch ) , useRaw ) ;
public class RebalanceUtils { /** * Confirms that both clusters have the same number of total partitions . * @ param lhs * @ param rhs */ public static void validateClusterPartitionCounts ( final Cluster lhs , final Cluster rhs ) { } }
if ( lhs . getNumberOfPartitions ( ) != rhs . getNumberOfPartitions ( ) ) throw new VoldemortException ( "Total number of partitions should be equal [ lhs cluster (" + lhs . getNumberOfPartitions ( ) + ") not equal to rhs cluster (" + rhs . getNumberOfPartitions ( ) + ") ]" ) ;
public class DefaultComponentManagerManager { /** * Create a new { @ link ComponentManager } for the provided id . * @ param namespace the identifier of the component manager * @ return a new { @ link ComponentManager } instance */ private ComponentManager createComponentManager ( String namespace ) { } }
String prefix = NamespaceUtils . getPrefix ( namespace ) ; ComponentManagerFactory componentManagerFactory ; try { componentManagerFactory = this . rootComponentManager . getInstance ( ComponentManagerFactory . class , prefix ) ; } catch ( ComponentLookupException e ) { componentManagerFactory = this . defaultComponentManagerFactory ; } return componentManagerFactory . createComponentManager ( namespace , this . rootComponentManager ) ;
public class FullDTDReader {
    /**
     * Method similar to {@link #skipPI}, but one that does basic
     * well-formedness checks: validates the PI target, requires a space (or an
     * immediate close) after it, and — when an event listener is registered —
     * collects the PI data and reports it.
     */
    protected void readPI() throws XMLStreamException {
        String target = parseFullName();
        if (target.length() == 0) {
            _reportWFCViolation(ErrorConsts.ERR_WF_PI_MISSING_TARGET);
        }
        // "xml" (any case) is a reserved target.
        if (target.equalsIgnoreCase("xml")) {
            _reportWFCViolation(ErrorConsts.ERR_WF_PI_XML_TARGET, target);
        }
        char c = dtdNextFromCurr();
        // Ok, need a space between target and data nonetheless
        if (!isSpaceChar(c)) {
            // except if it ends right away
            if (c != '?' || dtdNextFromCurr() != '>') {
                throwUnexpectedChar(c, ErrorConsts.ERR_WF_PI_XML_MISSING_SPACE);
            }
            // Empty PI ("<?target?>"): report with empty data.
            if (mEventListener != null) {
                mEventListener.dtdProcessingInstruction(target, "");
            }
        } else if (mEventListener == null) {
            /* Otherwise, not that much to check since we don't care about
             * the contents: just scan for the closing "?>" while validating
             * whitespace. */
            while (true) {
                c = (mInputPtr < mInputEnd) ? mInputBuffer[mInputPtr++] : dtdNextFromCurr();
                if (c == '?') {
                    // Consume a run of '?'; a following '>' ends the PI.
                    do {
                        c = (mInputPtr < mInputEnd) ? mInputBuffer[mInputPtr++] : dtdNextFromCurr();
                    } while (c == '?');
                    if (c == '>') {
                        break;
                    }
                }
                if (c < CHAR_SPACE) {
                    if (c == '\n' || c == '\r') {
                        skipCRLF(c);
                    } else if (c != '\t') {
                        throwInvalidSpace(c);
                    }
                }
            }
        } else {
            // 24-Nov-2006, TSa: Actually, someone does care...
            // First, need to skip extra space (if any)
            while (c <= CHAR_SPACE) {
                if (c == '\n' || c == '\r') {
                    skipCRLF(c);
                } else if (c != '\t' && c != ' ') {
                    throwInvalidSpace(c);
                }
                c = (mInputPtr < mInputEnd) ? mInputBuffer[mInputPtr++] : dtdNextFromCurr();
            }
            // Accumulate the PI data into the shared text buffer.
            TextBuffer tb = getTextBuffer();
            char[] outBuf = tb.getCurrentSegment();
            int outPtr = 0;
            while (true) {
                if (c == '?') {
                    // Copy any extra '?' characters; "?>" terminates the PI.
                    while (true) {
                        c = (mInputPtr < mInputEnd) ? mInputBuffer[mInputPtr++] : dtdNextFromCurr();
                        if (c != '?') {
                            break;
                        }
                        if (outPtr >= outBuf.length) {
                            outBuf = tb.finishCurrentSegment();
                            outPtr = 0;
                        }
                        outBuf[outPtr++] = c;
                    }
                    if (c == '>') {
                        break;
                    }
                    // Need to push back char that follows '?', output '?'
                    --mInputPtr;
                    c = '?';
                } else if (c < CHAR_SPACE) {
                    if (c == '\n' || c == '\r') {
                        // Normalize line ends to '\n'.
                        skipCRLF(c);
                        c = '\n';
                    } else if (c != '\t') {
                        throwInvalidSpace(c);
                    }
                }
                // Need more room?
                if (outPtr >= outBuf.length) {
                    outBuf = tb.finishCurrentSegment();
                    outPtr = 0;
                }
                // Ok, let's add char to output:
                outBuf[outPtr++] = c;
                c = (mInputPtr < mInputEnd) ? mInputBuffer[mInputPtr++] : dtdNextFromCurr();
            }
            tb.setCurrentLength(outPtr);
            String data = tb.contentsAsString();
            mEventListener.dtdProcessingInstruction(target, data);
        }
    }
}
public class JMPathOperation { /** * Delete boolean . * @ param targetPath the target path * @ return the boolean */ public static boolean delete ( Path targetPath ) { } }
debug ( log , "delete" , targetPath ) ; try { Files . delete ( targetPath ) ; return true ; } catch ( Exception e ) { return JMExceptionManager . handleExceptionAndReturnFalse ( log , e , "delete" , targetPath ) ; }
public class Timestamp { /** * Calculates the time passed from the reference to this timeStamp . * The result is the relative time from the reference to this * timestamp , so that reference + result = this . * @ param reference another time stamp * @ return the duration from the reference to this */ public TimeDuration durationFrom ( Timestamp reference ) { } }
long nanoSecDiff = nanoSec - reference . nanoSec ; nanoSecDiff += ( unixSec - reference . unixSec ) * 1000000000 ; return TimeDuration . ofNanos ( nanoSecDiff ) ;
public class EvaluationEngineImpl { /** * ( non - Javadoc ) * @ see org . fcrepo . server . security . xacml . pep . EvaluationEngine # evaluate ( java . lang . String ) */ @ Override public String evaluate ( String request ) throws PEPException { } }
if ( logger . isDebugEnabled ( ) ) { logger . debug ( "evaluating String request" ) ; } String [ ] requests = new String [ ] { request } ; return evaluate ( requests ) ;